From 510b341734e429f85830f2623e28513a393d193a Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 22 Apr 2024 19:03:27 +0000 Subject: [PATCH] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20post-p?= =?UTF-8?q?rocessor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- owl-bot-staging/datastore/v1/.coveragerc | 13 - owl-bot-staging/datastore/v1/.flake8 | 33 - owl-bot-staging/datastore/v1/MANIFEST.in | 2 - owl-bot-staging/datastore/v1/README.rst | 49 - .../datastore/v1/docs/_static/custom.css | 3 - owl-bot-staging/datastore/v1/docs/conf.py | 376 -- .../v1/docs/datastore_v1/datastore.rst | 6 - .../v1/docs/datastore_v1/services_.rst | 6 - .../datastore/v1/docs/datastore_v1/types_.rst | 6 - owl-bot-staging/datastore/v1/docs/index.rst | 7 - .../v1/google/cloud/datastore/__init__.py | 115 - .../google/cloud/datastore/gapic_version.py | 16 - .../v1/google/cloud/datastore/py.typed | 2 - .../v1/google/cloud/datastore_v1/__init__.py | 116 - .../cloud/datastore_v1/gapic_metadata.json | 148 - .../cloud/datastore_v1/gapic_version.py | 16 - .../v1/google/cloud/datastore_v1/py.typed | 2 - .../cloud/datastore_v1/services/__init__.py | 15 - .../services/datastore/__init__.py | 22 - .../services/datastore/async_client.py | 1383 ---- .../datastore_v1/services/datastore/client.py | 1740 ----- .../services/datastore/transports/__init__.py | 38 - .../services/datastore/transports/base.py | 316 - .../services/datastore/transports/grpc.py | 528 -- .../datastore/transports/grpc_asyncio.py | 527 -- .../services/datastore/transports/rest.py | 1475 ---- .../cloud/datastore_v1/types/__init__.py | 116 - .../datastore_v1/types/aggregation_result.py | 101 - .../cloud/datastore_v1/types/datastore.py | 1027 --- .../google/cloud/datastore_v1/types/entity.py | 394 -- .../google/cloud/datastore_v1/types/query.py | 904 --- .../cloud/datastore_v1/types/query_profile.py | 144 - owl-bot-staging/datastore/v1/mypy.ini | 3 - owl-bot-staging/datastore/v1/noxfile.py | 253 - ..._generated_datastore_allocate_ids_async.py | 52 - ...1_generated_datastore_allocate_ids_sync.py | 52 - ...rated_datastore_begin_transaction_async.py | 52 - ...erated_datastore_begin_transaction_sync.py | 52 - ...ore_v1_generated_datastore_commit_async.py | 53 - ...tore_v1_generated_datastore_commit_sync.py | 53 - ...ore_v1_generated_datastore_lookup_async.py | 52 - ...tore_v1_generated_datastore_lookup_sync.py | 52 - ...1_generated_datastore_reserve_ids_async.py | 52 - ...v1_generated_datastore_reserve_ids_sync.py | 52 - ...e_v1_generated_datastore_rollback_async.py | 53 - ...re_v1_generated_datastore_rollback_sync.py | 53 - ...d_datastore_run_aggregation_query_async.py | 52 - ...ed_datastore_run_aggregation_query_sync.py | 52 - ..._v1_generated_datastore_run_query_async.py | 52 - ...e_v1_generated_datastore_run_query_sync.py | 52 - .../snippet_metadata_google.datastore.v1.json | 1351 ---- .../v1/scripts/fixup_datastore_v1_keywords.py | 183 - owl-bot-staging/datastore/v1/setup.py | 93 - .../datastore/v1/testing/constraints-3.10.txt | 6 - .../datastore/v1/testing/constraints-3.11.txt | 6 - .../datastore/v1/testing/constraints-3.12.txt | 6 - .../datastore/v1/testing/constraints-3.7.txt | 10 - .../datastore/v1/testing/constraints-3.8.txt | 6 - .../datastore/v1/testing/constraints-3.9.txt | 6 - .../datastore/v1/tests/__init__.py | 16 - .../datastore/v1/tests/unit/__init__.py | 16 - .../datastore/v1/tests/unit/gapic/__init__.py | 16 
- .../tests/unit/gapic/datastore_v1/__init__.py | 16 - .../unit/gapic/datastore_v1/test_datastore.py | 5984 ----------------- .../datastore_admin/v1/.coveragerc | 13 - owl-bot-staging/datastore_admin/v1/.flake8 | 33 - .../datastore_admin/v1/MANIFEST.in | 2 - owl-bot-staging/datastore_admin/v1/README.rst | 49 - .../v1/docs/_static/custom.css | 3 - .../datastore_admin/v1/docs/conf.py | 376 -- .../datastore_admin_v1/datastore_admin.rst | 10 - .../v1/docs/datastore_admin_v1/services_.rst | 6 - .../v1/docs/datastore_admin_v1/types_.rst | 6 - .../datastore_admin/v1/docs/index.rst | 7 - .../google/cloud/datastore_admin/__init__.py | 69 - .../cloud/datastore_admin/gapic_version.py | 16 - .../v1/google/cloud/datastore_admin/py.typed | 2 - .../cloud/datastore_admin_v1/__init__.py | 70 - .../datastore_admin_v1/gapic_metadata.json | 118 - .../cloud/datastore_admin_v1/gapic_version.py | 16 - .../google/cloud/datastore_admin_v1/py.typed | 2 - .../datastore_admin_v1/services/__init__.py | 15 - .../services/datastore_admin/__init__.py | 22 - .../services/datastore_admin/async_client.py | 1285 ---- .../services/datastore_admin/client.py | 1616 ----- .../services/datastore_admin/pagers.py | 140 - .../datastore_admin/transports/__init__.py | 38 - .../datastore_admin/transports/base.py | 281 - .../datastore_admin/transports/grpc.py | 584 -- .../transports/grpc_asyncio.py | 583 -- .../datastore_admin/transports/rest.py | 1271 ---- .../datastore_admin_v1/types/__init__.py | 66 - .../types/datastore_admin.py | 639 -- .../cloud/datastore_admin_v1/types/index.py | 180 - .../datastore_admin_v1/types/migration.py | 200 - owl-bot-staging/datastore_admin/v1/mypy.ini | 3 - owl-bot-staging/datastore_admin/v1/noxfile.py | 253 - ...ated_datastore_admin_create_index_async.py | 55 - ...rated_datastore_admin_create_index_sync.py | 55 - ...ated_datastore_admin_delete_index_async.py | 55 - ...rated_datastore_admin_delete_index_sync.py | 55 - ...d_datastore_admin_export_entities_async.py | 57 - ...ed_datastore_admin_export_entities_sync.py | 57 - ...nerated_datastore_admin_get_index_async.py | 51 - ...enerated_datastore_admin_get_index_sync.py | 51 - ...d_datastore_admin_import_entities_async.py | 57 - ...ed_datastore_admin_import_entities_sync.py | 57 - ...ated_datastore_admin_list_indexes_async.py | 52 - ...rated_datastore_admin_list_indexes_sync.py | 52 - ...et_metadata_google.datastore.admin.v1.json | 997 --- .../fixup_datastore_admin_v1_keywords.py | 181 - owl-bot-staging/datastore_admin/v1/setup.py | 93 - .../v1/testing/constraints-3.10.txt | 6 - .../v1/testing/constraints-3.11.txt | 6 - .../v1/testing/constraints-3.12.txt | 6 - .../v1/testing/constraints-3.7.txt | 10 - .../v1/testing/constraints-3.8.txt | 6 - .../v1/testing/constraints-3.9.txt | 6 - .../datastore_admin/v1/tests/__init__.py | 16 - .../datastore_admin/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/datastore_admin_v1/__init__.py | 16 - .../test_datastore_admin.py | 4867 -------------- 123 files changed, 33323 deletions(-) delete mode 100644 owl-bot-staging/datastore/v1/.coveragerc delete mode 100644 owl-bot-staging/datastore/v1/.flake8 delete mode 100644 owl-bot-staging/datastore/v1/MANIFEST.in delete mode 100644 owl-bot-staging/datastore/v1/README.rst delete mode 100644 owl-bot-staging/datastore/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/datastore/v1/docs/conf.py delete mode 100644 owl-bot-staging/datastore/v1/docs/datastore_v1/datastore.rst delete mode 100644 
owl-bot-staging/datastore/v1/docs/datastore_v1/services_.rst delete mode 100644 owl-bot-staging/datastore/v1/docs/datastore_v1/types_.rst delete mode 100644 owl-bot-staging/datastore/v1/docs/index.rst delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore/gapic_version.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore/py.typed delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/gapic_version.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/py.typed delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/async_client.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/client.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/base.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/grpc.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/rest.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/aggregation_result.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/datastore.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/entity.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/query.py delete mode 100644 owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/query_profile.py delete mode 100644 owl-bot-staging/datastore/v1/mypy.ini delete mode 100644 owl-bot-staging/datastore/v1/noxfile.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_allocate_ids_async.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_allocate_ids_sync.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_begin_transaction_async.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_begin_transaction_sync.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_commit_async.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_commit_sync.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_lookup_async.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_lookup_sync.py delete mode 100644 
owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_reserve_ids_async.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_reserve_ids_sync.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_rollback_async.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_rollback_sync.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_aggregation_query_async.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_aggregation_query_sync.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_query_async.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_query_sync.py delete mode 100644 owl-bot-staging/datastore/v1/samples/generated_samples/snippet_metadata_google.datastore.v1.json delete mode 100644 owl-bot-staging/datastore/v1/scripts/fixup_datastore_v1_keywords.py delete mode 100644 owl-bot-staging/datastore/v1/setup.py delete mode 100644 owl-bot-staging/datastore/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/datastore/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/datastore/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/datastore/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/datastore/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/datastore/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/datastore/v1/tests/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/tests/unit/gapic/datastore_v1/__init__.py delete mode 100644 owl-bot-staging/datastore/v1/tests/unit/gapic/datastore_v1/test_datastore.py delete mode 100644 owl-bot-staging/datastore_admin/v1/.coveragerc delete mode 100644 owl-bot-staging/datastore_admin/v1/.flake8 delete mode 100644 owl-bot-staging/datastore_admin/v1/MANIFEST.in delete mode 100644 owl-bot-staging/datastore_admin/v1/README.rst delete mode 100644 owl-bot-staging/datastore_admin/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/datastore_admin/v1/docs/conf.py delete mode 100644 owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/datastore_admin.rst delete mode 100644 owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/services_.rst delete mode 100644 owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/types_.rst delete mode 100644 owl-bot-staging/datastore_admin/v1/docs/index.rst delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/gapic_version.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/py.typed delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/gapic_version.py delete mode 100644 
owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/py.typed delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/client.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/datastore_admin.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/index.py delete mode 100644 owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/migration.py delete mode 100644 owl-bot-staging/datastore_admin/v1/mypy.ini delete mode 100644 owl-bot-staging/datastore_admin/v1/noxfile.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_create_index_async.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_create_index_sync.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_delete_index_async.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_delete_index_sync.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_export_entities_async.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_export_entities_sync.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_get_index_async.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_get_index_sync.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_import_entities_async.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_import_entities_sync.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_list_indexes_async.py delete mode 100644 
owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_list_indexes_sync.py delete mode 100644 owl-bot-staging/datastore_admin/v1/samples/generated_samples/snippet_metadata_google.datastore.admin.v1.json delete mode 100644 owl-bot-staging/datastore_admin/v1/scripts/fixup_datastore_admin_v1_keywords.py delete mode 100644 owl-bot-staging/datastore_admin/v1/setup.py delete mode 100644 owl-bot-staging/datastore_admin/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/datastore_admin/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/datastore_admin/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/datastore_admin/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/datastore_admin/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/datastore_admin/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/datastore_admin/v1/tests/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/tests/unit/gapic/datastore_admin_v1/__init__.py delete mode 100644 owl-bot-staging/datastore_admin/v1/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py diff --git a/owl-bot-staging/datastore/v1/.coveragerc b/owl-bot-staging/datastore/v1/.coveragerc deleted file mode 100644 index c6913784..00000000 --- a/owl-bot-staging/datastore/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/datastore/__init__.py - google/cloud/datastore/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/datastore/v1/.flake8 b/owl-bot-staging/datastore/v1/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/datastore/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/datastore/v1/MANIFEST.in b/owl-bot-staging/datastore/v1/MANIFEST.in deleted file mode 100644 index e2acc8c2..00000000 --- a/owl-bot-staging/datastore/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/datastore *.py -recursive-include google/cloud/datastore_v1 *.py diff --git a/owl-bot-staging/datastore/v1/README.rst b/owl-bot-staging/datastore/v1/README.rst deleted file mode 100644 index 39994638..00000000 --- a/owl-bot-staging/datastore/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Datastore API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Datastore API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/datastore/v1/docs/_static/custom.css b/owl-bot-staging/datastore/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0..00000000 --- a/owl-bot-staging/datastore/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/datastore/v1/docs/conf.py b/owl-bot-staging/datastore/v1/docs/conf.py deleted file mode 100644 index 30b62c8a..00000000 --- a/owl-bot-staging/datastore/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-datastore documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default.
- -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-datastore" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. 
-# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-datastore-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-datastore.tex", - u"google-cloud-datastore Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-datastore", - u"Google Cloud Datastore Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-datastore", - u"google-cloud-datastore Documentation", - author, - "google-cloud-datastore", - "GAPIC library for Google Cloud Datastore API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/datastore/v1/docs/datastore_v1/datastore.rst b/owl-bot-staging/datastore/v1/docs/datastore_v1/datastore.rst deleted file mode 100644 index e1e5e266..00000000 --- a/owl-bot-staging/datastore/v1/docs/datastore_v1/datastore.rst +++ /dev/null @@ -1,6 +0,0 @@ -Datastore ---------------------------- - -.. automodule:: google.cloud.datastore_v1.services.datastore - :members: - :inherited-members: diff --git a/owl-bot-staging/datastore/v1/docs/datastore_v1/services_.rst b/owl-bot-staging/datastore/v1/docs/datastore_v1/services_.rst deleted file mode 100644 index 2963c364..00000000 --- a/owl-bot-staging/datastore/v1/docs/datastore_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Datastore v1 API -========================================== -.. toctree:: - :maxdepth: 2 - - datastore diff --git a/owl-bot-staging/datastore/v1/docs/datastore_v1/types_.rst b/owl-bot-staging/datastore/v1/docs/datastore_v1/types_.rst deleted file mode 100644 index 3ef6fbfc..00000000 --- a/owl-bot-staging/datastore/v1/docs/datastore_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Datastore v1 API -======================================= - -.. automodule:: google.cloud.datastore_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/datastore/v1/docs/index.rst b/owl-bot-staging/datastore/v1/docs/index.rst deleted file mode 100644 index 568eecfe..00000000 --- a/owl-bot-staging/datastore/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - datastore_v1/services - datastore_v1/types diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore/__init__.py b/owl-bot-staging/datastore/v1/google/cloud/datastore/__init__.py deleted file mode 100644 index f200b986..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore/__init__.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.datastore import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.datastore_v1.services.datastore.client import DatastoreClient -from google.cloud.datastore_v1.services.datastore.async_client import DatastoreAsyncClient - -from google.cloud.datastore_v1.types.aggregation_result import AggregationResult -from google.cloud.datastore_v1.types.aggregation_result import AggregationResultBatch -from google.cloud.datastore_v1.types.datastore import AllocateIdsRequest -from google.cloud.datastore_v1.types.datastore import AllocateIdsResponse -from google.cloud.datastore_v1.types.datastore import BeginTransactionRequest -from google.cloud.datastore_v1.types.datastore import BeginTransactionResponse -from google.cloud.datastore_v1.types.datastore import CommitRequest -from google.cloud.datastore_v1.types.datastore import CommitResponse -from google.cloud.datastore_v1.types.datastore import LookupRequest -from google.cloud.datastore_v1.types.datastore import LookupResponse -from google.cloud.datastore_v1.types.datastore import Mutation -from google.cloud.datastore_v1.types.datastore import MutationResult -from google.cloud.datastore_v1.types.datastore import ReadOptions -from google.cloud.datastore_v1.types.datastore import ReserveIdsRequest -from google.cloud.datastore_v1.types.datastore import ReserveIdsResponse -from google.cloud.datastore_v1.types.datastore import RollbackRequest -from google.cloud.datastore_v1.types.datastore import RollbackResponse -from google.cloud.datastore_v1.types.datastore import RunAggregationQueryRequest -from google.cloud.datastore_v1.types.datastore import RunAggregationQueryResponse -from google.cloud.datastore_v1.types.datastore import RunQueryRequest -from google.cloud.datastore_v1.types.datastore import RunQueryResponse -from google.cloud.datastore_v1.types.datastore import TransactionOptions -from google.cloud.datastore_v1.types.entity import ArrayValue -from google.cloud.datastore_v1.types.entity import Entity -from google.cloud.datastore_v1.types.entity import Key -from google.cloud.datastore_v1.types.entity import PartitionId -from google.cloud.datastore_v1.types.entity import Value -from google.cloud.datastore_v1.types.query import AggregationQuery -from google.cloud.datastore_v1.types.query import CompositeFilter -from google.cloud.datastore_v1.types.query import EntityResult -from google.cloud.datastore_v1.types.query import Filter -from google.cloud.datastore_v1.types.query import GqlQuery -from google.cloud.datastore_v1.types.query import GqlQueryParameter -from google.cloud.datastore_v1.types.query import KindExpression -from google.cloud.datastore_v1.types.query import Projection -from google.cloud.datastore_v1.types.query import PropertyFilter -from google.cloud.datastore_v1.types.query import PropertyOrder -from google.cloud.datastore_v1.types.query import PropertyReference -from google.cloud.datastore_v1.types.query import Query -from google.cloud.datastore_v1.types.query import QueryResultBatch -from google.cloud.datastore_v1.types.query_profile import ExecutionStats 
-from google.cloud.datastore_v1.types.query_profile import ExplainMetrics -from google.cloud.datastore_v1.types.query_profile import ExplainOptions -from google.cloud.datastore_v1.types.query_profile import PlanSummary - -__all__ = ('DatastoreClient', - 'DatastoreAsyncClient', - 'AggregationResult', - 'AggregationResultBatch', - 'AllocateIdsRequest', - 'AllocateIdsResponse', - 'BeginTransactionRequest', - 'BeginTransactionResponse', - 'CommitRequest', - 'CommitResponse', - 'LookupRequest', - 'LookupResponse', - 'Mutation', - 'MutationResult', - 'ReadOptions', - 'ReserveIdsRequest', - 'ReserveIdsResponse', - 'RollbackRequest', - 'RollbackResponse', - 'RunAggregationQueryRequest', - 'RunAggregationQueryResponse', - 'RunQueryRequest', - 'RunQueryResponse', - 'TransactionOptions', - 'ArrayValue', - 'Entity', - 'Key', - 'PartitionId', - 'Value', - 'AggregationQuery', - 'CompositeFilter', - 'EntityResult', - 'Filter', - 'GqlQuery', - 'GqlQueryParameter', - 'KindExpression', - 'Projection', - 'PropertyFilter', - 'PropertyOrder', - 'PropertyReference', - 'Query', - 'QueryResultBatch', - 'ExecutionStats', - 'ExplainMetrics', - 'ExplainOptions', - 'PlanSummary', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore/gapic_version.py b/owl-bot-staging/datastore/v1/google/cloud/datastore/gapic_version.py deleted file mode 100644 index 558c8aab..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore/py.typed b/owl-bot-staging/datastore/v1/google/cloud/datastore/py.typed deleted file mode 100644 index e82a9319..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-datastore package uses inline types. diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/__init__.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/__init__.py deleted file mode 100644 index 77ae3b80..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/__init__.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.datastore_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.datastore import DatastoreClient -from .services.datastore import DatastoreAsyncClient - -from .types.aggregation_result import AggregationResult -from .types.aggregation_result import AggregationResultBatch -from .types.datastore import AllocateIdsRequest -from .types.datastore import AllocateIdsResponse -from .types.datastore import BeginTransactionRequest -from .types.datastore import BeginTransactionResponse -from .types.datastore import CommitRequest -from .types.datastore import CommitResponse -from .types.datastore import LookupRequest -from .types.datastore import LookupResponse -from .types.datastore import Mutation -from .types.datastore import MutationResult -from .types.datastore import ReadOptions -from .types.datastore import ReserveIdsRequest -from .types.datastore import ReserveIdsResponse -from .types.datastore import RollbackRequest -from .types.datastore import RollbackResponse -from .types.datastore import RunAggregationQueryRequest -from .types.datastore import RunAggregationQueryResponse -from .types.datastore import RunQueryRequest -from .types.datastore import RunQueryResponse -from .types.datastore import TransactionOptions -from .types.entity import ArrayValue -from .types.entity import Entity -from .types.entity import Key -from .types.entity import PartitionId -from .types.entity import Value -from .types.query import AggregationQuery -from .types.query import CompositeFilter -from .types.query import EntityResult -from .types.query import Filter -from .types.query import GqlQuery -from .types.query import GqlQueryParameter -from .types.query import KindExpression -from .types.query import Projection -from .types.query import PropertyFilter -from .types.query import PropertyOrder -from .types.query import PropertyReference -from .types.query import Query -from .types.query import QueryResultBatch -from .types.query_profile import ExecutionStats -from .types.query_profile import ExplainMetrics -from .types.query_profile import ExplainOptions -from .types.query_profile import PlanSummary - -__all__ = ( - 'DatastoreAsyncClient', -'AggregationQuery', -'AggregationResult', -'AggregationResultBatch', -'AllocateIdsRequest', -'AllocateIdsResponse', -'ArrayValue', -'BeginTransactionRequest', -'BeginTransactionResponse', -'CommitRequest', -'CommitResponse', -'CompositeFilter', -'DatastoreClient', -'Entity', -'EntityResult', -'ExecutionStats', -'ExplainMetrics', -'ExplainOptions', -'Filter', -'GqlQuery', -'GqlQueryParameter', -'Key', -'KindExpression', -'LookupRequest', -'LookupResponse', -'Mutation', -'MutationResult', -'PartitionId', -'PlanSummary', -'Projection', -'PropertyFilter', -'PropertyOrder', -'PropertyReference', -'Query', -'QueryResultBatch', -'ReadOptions', -'ReserveIdsRequest', -'ReserveIdsResponse', -'RollbackRequest', -'RollbackResponse', -'RunAggregationQueryRequest', -'RunAggregationQueryResponse', -'RunQueryRequest', -'RunQueryResponse', -'TransactionOptions', -'Value', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/gapic_metadata.json b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/gapic_metadata.json deleted file mode 100644 index a38c2609..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/gapic_metadata.json +++ /dev/null @@ -1,148 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", 
- "libraryPackage": "google.cloud.datastore_v1", - "protoPackage": "google.datastore.v1", - "schema": "1.0", - "services": { - "Datastore": { - "clients": { - "grpc": { - "libraryClient": "DatastoreClient", - "rpcs": { - "AllocateIds": { - "methods": [ - "allocate_ids" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "Lookup": { - "methods": [ - "lookup" - ] - }, - "ReserveIds": { - "methods": [ - "reserve_ids" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DatastoreAsyncClient", - "rpcs": { - "AllocateIds": { - "methods": [ - "allocate_ids" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "Lookup": { - "methods": [ - "lookup" - ] - }, - "ReserveIds": { - "methods": [ - "reserve_ids" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - } - } - }, - "rest": { - "libraryClient": "DatastoreClient", - "rpcs": { - "AllocateIds": { - "methods": [ - "allocate_ids" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "Lookup": { - "methods": [ - "lookup" - ] - }, - "ReserveIds": { - "methods": [ - "reserve_ids" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "RunAggregationQuery": { - "methods": [ - "run_aggregation_query" - ] - }, - "RunQuery": { - "methods": [ - "run_query" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/gapic_version.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/gapic_version.py deleted file mode 100644 index 558c8aab..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/py.typed b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/py.typed deleted file mode 100644 index e82a9319..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-datastore package uses inline types. 
diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/__init__.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/__init__.py deleted file mode 100644 index 8f6cf068..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/__init__.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/__init__.py deleted file mode 100644 index 1ae6e5b9..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DatastoreClient -from .async_client import DatastoreAsyncClient - -__all__ = ( - 'DatastoreClient', - 'DatastoreAsyncClient', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/async_client.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/async_client.py deleted file mode 100644 index 5ef1c847..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/async_client.py +++ /dev/null @@ -1,1383 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.datastore_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.datastore_v1.types import aggregation_result -from google.cloud.datastore_v1.types import datastore -from google.cloud.datastore_v1.types import entity -from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport -from .client import DatastoreClient - - -class DatastoreAsyncClient: - """Each RPC normalizes the partition IDs of the keys in its - input entities, and always returns entities with keys with - normalized partition IDs. This applies to all keys and entities, - including those in values, except keys with both an empty path - and an empty or unset partition ID. Normalization of input keys - sets the project ID (if not already set) to the project ID from - the request. - """ - - _client: DatastoreClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = DatastoreClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DatastoreClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DatastoreClient._DEFAULT_UNIVERSE - - common_billing_account_path = staticmethod(DatastoreClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DatastoreClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DatastoreClient.common_folder_path) - parse_common_folder_path = staticmethod(DatastoreClient.parse_common_folder_path) - common_organization_path = staticmethod(DatastoreClient.common_organization_path) - parse_common_organization_path = staticmethod(DatastoreClient.parse_common_organization_path) - common_project_path = staticmethod(DatastoreClient.common_project_path) - parse_common_project_path = staticmethod(DatastoreClient.parse_common_project_path) - common_location_path = staticmethod(DatastoreClient.common_location_path) - parse_common_location_path = staticmethod(DatastoreClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreAsyncClient: The constructed client. 
- """ - return DatastoreClient.from_service_account_info.__func__(DatastoreAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreAsyncClient: The constructed client. - """ - return DatastoreClient.from_service_account_file.__func__(DatastoreAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DatastoreClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DatastoreTransport: - """Returns the transport used by the client instance. - - Returns: - DatastoreTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = functools.partial(type(DatastoreClient).get_transport_class, type(DatastoreClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatastoreTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the datastore async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DatastoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which has one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DatastoreClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def lookup(self, - request: Optional[Union[datastore.LookupRequest, dict]] = None, - *, - project_id: Optional[str] = None, - read_options: Optional[datastore.ReadOptions] = None, - keys: Optional[MutableSequence[entity.Key]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.LookupResponse: - r"""Looks up entities by key. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - async def sample_lookup(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.LookupRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.lookup(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_v1.types.LookupRequest, dict]]): - The request object.
The request for - [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - project_id (:class:`str`): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - read_options (:class:`google.cloud.datastore_v1.types.ReadOptions`): - The options for this lookup request. - This corresponds to the ``read_options`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - keys (:class:`MutableSequence[google.cloud.datastore_v1.types.Key]`): - Required. Keys of entities to look - up. - - This corresponds to the ``keys`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.LookupResponse: - The response for - [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, read_options, keys]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datastore.LookupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if read_options is not None: - request.read_options = read_options - if keys: - request.keys.extend(keys) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.lookup, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def run_query(self, - request: Optional[Union[datastore.RunQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.RunQueryResponse: - r"""Queries for entities. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - async def sample_run_query(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.RunQueryRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.run_query(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]]): - The request object. The request for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.RunQueryResponse: - The response for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - - """ - # Create or coerce a protobuf request object. - request = datastore.RunQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_query, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def run_aggregation_query(self, - request: Optional[Union[datastore.RunAggregationQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.RunAggregationQueryResponse: - r"""Runs an aggregation query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - async def sample_run_aggregation_query(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.RunAggregationQueryRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.run_aggregation_query(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_v1.types.RunAggregationQueryRequest, dict]]): - The request object. 
The request for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.RunAggregationQueryResponse: - The response for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - - """ - # Create or coerce a protobuf request object. - request = datastore.RunAggregationQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_aggregation_query, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def begin_transaction(self, - request: Optional[Union[datastore.BeginTransactionRequest, dict]] = None, - *, - project_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.BeginTransactionResponse: - r"""Begins a new transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - async def sample_begin_transaction(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.BeginTransactionRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]]): - The request object. The request for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - project_id (:class:`str`): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.datastore_v1.types.BeginTransactionResponse: - The response for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datastore.BeginTransactionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.begin_transaction, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def commit(self, - request: Optional[Union[datastore.CommitRequest, dict]] = None, - *, - project_id: Optional[str] = None, - mode: Optional[datastore.CommitRequest.Mode] = None, - transaction: Optional[bytes] = None, - mutations: Optional[MutableSequence[datastore.Mutation]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.CommitResponse: - r"""Commits a transaction, optionally creating, deleting - or modifying some entities. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - async def sample_commit(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.CommitRequest( - transaction=b'transaction_blob', - project_id="project_id_value", - ) - - # Make the request - response = await client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_v1.types.CommitRequest, dict]]): - The request object. The request for - [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - project_id (:class:`str`): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mode (:class:`google.cloud.datastore_v1.types.CommitRequest.Mode`): - The type of commit to perform. Defaults to - ``TRANSACTIONAL``. 
- - This corresponds to the ``mode`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (:class:`bytes`): - The identifier of the transaction associated with the - commit. A transaction identifier is returned by a call - to - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutations (:class:`MutableSequence[google.cloud.datastore_v1.types.Mutation]`): - The mutations to perform. - - When mode is ``TRANSACTIONAL``, mutations affecting a - single entity are applied in order. The following - sequences of mutations affecting a single entity are not - permitted in a single ``Commit`` request: - - - ``insert`` followed by ``insert`` - - ``update`` followed by ``insert`` - - ``upsert`` followed by ``insert`` - - ``delete`` followed by ``update`` - - When mode is ``NON_TRANSACTIONAL``, no two mutations may - affect a single entity. - - This corresponds to the ``mutations`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.CommitResponse: - The response for - [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, mode, transaction, mutations]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datastore.CommitRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if mode is not None: - request.mode = mode - if transaction is not None: - request.transaction = transaction - if mutations: - request.mutations.extend(mutations) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.commit, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rollback(self, - request: Optional[Union[datastore.RollbackRequest, dict]] = None, - *, - project_id: Optional[str] = None, - transaction: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.RollbackResponse: - r"""Rolls back a transaction. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - async def sample_rollback(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.RollbackRequest( - project_id="project_id_value", - transaction=b'transaction_blob', - ) - - # Make the request - response = await client.rollback(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_v1.types.RollbackRequest, dict]]): - The request object. The request for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - project_id (:class:`str`): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (:class:`bytes`): - Required. The transaction identifier, returned by a call - to - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.RollbackResponse: - The response for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - (an empty message). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, transaction]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datastore.RollbackRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if transaction is not None: - request.transaction = transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rollback, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def allocate_ids(self, - request: Optional[Union[datastore.AllocateIdsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - keys: Optional[MutableSequence[entity.Key]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.AllocateIdsResponse: - r"""Allocates IDs for the given keys, which is useful for - referencing an entity before it is inserted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - async def sample_allocate_ids(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.AllocateIdsRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.allocate_ids(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]]): - The request object. The request for - [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. - project_id (:class:`str`): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - keys (:class:`MutableSequence[google.cloud.datastore_v1.types.Key]`): - Required. A list of keys with - incomplete key paths for which to - allocate IDs. No key may be - reserved/read-only. - - This corresponds to the ``keys`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.AllocateIdsResponse: - The response for - [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datastore.AllocateIdsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if keys: - request.keys.extend(keys) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.allocate_ids, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reserve_ids(self, - request: Optional[Union[datastore.ReserveIdsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - keys: Optional[MutableSequence[entity.Key]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.ReserveIdsResponse: - r"""Prevents the supplied keys' IDs from being - auto-allocated by Cloud Datastore. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - async def sample_reserve_ids(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.ReserveIdsRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.reserve_ids(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]]): - The request object. The request for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - project_id (:class:`str`): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - keys (:class:`MutableSequence[google.cloud.datastore_v1.types.Key]`): - Required. A list of keys with - complete key paths whose numeric IDs - should not be auto-allocated. - - This corresponds to the ``keys`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.ReserveIdsResponse: - The response for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datastore.ReserveIdsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if project_id is not None: - request.project_id = project_id - if keys: - request.keys.extend(keys) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reserve_ids, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. 
- - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "DatastoreAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DatastoreAsyncClient", -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/client.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/client.py deleted file mode 100644 index 4c99ba6d..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/client.py +++ /dev/null @@ -1,1740 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
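The async client deleted above mirrors the synchronous ``DatastoreClient`` that follows. A hedged sketch of the transactional flow its docstrings describe (``begin_transaction`` followed by ``commit``), using the flattened keyword arguments documented above; the project ID and entity key below are illustrative placeholders, not values from this patch:

.. code-block:: python

    import asyncio

    from google.cloud import datastore_v1

    async def upsert_task(project_id: str) -> None:
        # "async with" closes the underlying transport via __aexit__ above.
        async with datastore_v1.DatastoreAsyncClient() as client:
            # Start a transaction; the response carries an opaque transaction id.
            tx = await client.begin_transaction(project_id=project_id)

            # Build an entity with a complete key (kind + name).
            entity = datastore_v1.Entity(
                key=datastore_v1.Key(
                    partition_id=datastore_v1.PartitionId(project_id=project_id),
                    path=[datastore_v1.Key.PathElement(kind="Task", name="sample-task")],
                ),
            )

            # Commit a single upsert inside the transaction.
            await client.commit(
                project_id=project_id,
                mode=datastore_v1.CommitRequest.Mode.TRANSACTIONAL,
                transaction=tx.transaction,
                mutations=[datastore_v1.Mutation(upsert=entity)],
            )

    asyncio.run(upsert_task("my-project"))  # placeholder project ID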
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.datastore_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.cloud.datastore_v1.types import aggregation_result -from google.cloud.datastore_v1.types import datastore -from google.cloud.datastore_v1.types import entity -from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DatastoreGrpcTransport -from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport -from .transports.rest import DatastoreRestTransport - - -class DatastoreClientMeta(type): - """Metaclass for the Datastore client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreTransport]] - _transport_registry["grpc"] = DatastoreGrpcTransport - _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport - _transport_registry["rest"] = DatastoreRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DatastoreTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DatastoreClient(metaclass=DatastoreClientMeta): - """Each RPC normalizes the partition IDs of the keys in its - input entities, and always returns entities with keys with - normalized partition IDs. This applies to all keys and entities, - including those in values, except keys with both an empty path - and an empty or unset partition ID. Normalization of input keys - sets the project ID (if not already set) to the project ID from - the request. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. 
- Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "datastore.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "datastore.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DatastoreTransport: - """Returns the transport used by the client instance. - - Returns: - DatastoreTransport: The transport used by the client - instance.
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. 
-
-        Args:
-            api_override (str): The API endpoint override. If specified, this is always
-                the return value of this function and the other arguments are not used.
-            client_cert_source (bytes): The client certificate source used by the client.
-            universe_domain (str): The universe domain used by the client.
-            use_mtls_endpoint (str): How to use the mTLS endpoint, which also depends on the other parameters.
-                Possible values are "always", "auto", or "never".
-
-        Returns:
-            str: The API endpoint to be used by the client.
-        """
-        if api_override is not None:
-            api_endpoint = api_override
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            _default_universe = DatastoreClient._DEFAULT_UNIVERSE
-            if universe_domain != _default_universe:
-                raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
-            api_endpoint = DatastoreClient.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
-        return api_endpoint
-
-    @staticmethod
-    def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
-        """Return the universe domain used by the client.
-
-        Args:
-            client_universe_domain (Optional[str]): The universe domain configured via the client options.
-            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
-
-        Returns:
-            str: The universe domain to be used by the client.
-
-        Raises:
-            ValueError: If the universe domain is an empty string.
-        """
-        universe_domain = DatastoreClient._DEFAULT_UNIVERSE
-        if client_universe_domain is not None:
-            universe_domain = client_universe_domain
-        elif universe_domain_env is not None:
-            universe_domain = universe_domain_env
-        if len(universe_domain.strip()) == 0:
-            raise ValueError("Universe Domain cannot be an empty string.")
-        return universe_domain
-
-    @staticmethod
-    def _compare_universes(client_universe: str,
-                           credentials: ga_credentials.Credentials) -> bool:
-        """Returns True iff the universe domains used by the client and credentials match.
-
-        Args:
-            client_universe (str): The universe domain configured via the client options.
-            credentials (ga_credentials.Credentials): The credentials being used in the client.
-
-        Returns:
-            bool: True iff client_universe matches the universe in credentials.
-
-        Raises:
-            ValueError: If client_universe does not match the universe in credentials.
-        """
-
-        default_universe = DatastoreClient._DEFAULT_UNIVERSE
-        credentials_universe = getattr(credentials, "universe_domain", default_universe)
-
-        if client_universe != credentials_universe:
-            raise ValueError("The configured universe domain "
-                f"({client_universe}) does not match the universe domain "
-                f"found in the credentials ({credentials_universe}). "
-                "If you haven't configured the universe domain explicitly, "
-                f"`{default_universe}` is the default.")
-        return True
-
-    def _validate_universe_domain(self):
-        """Validates that the client's and credentials' universe domains are consistent.
-
-        Returns:
-            bool: True iff the configured universe domain is valid.
-
-        Raises:
-            ValueError: If the configured universe domain is not valid.
-        """
-        self._is_universe_domain_valid = (self._is_universe_domain_valid or
-            DatastoreClient._compare_universes(self.universe_domain, self.transport._credentials))
-        return self._is_universe_domain_valid
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used by the client instance.
-        """
-        return self._universe_domain
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, DatastoreTransport]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the datastore client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, DatastoreTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. If this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if a client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence, and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
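-
-        A minimal construction sketch (the endpoint value below is
-        illustrative; every option shown may be omitted to fall back to
-        the defaults described above):
-
-        .. code-block:: python
-
-            from google.api_core import client_options as client_options_lib
-            from google.cloud import datastore_v1
-
-            # An explicit api_endpoint takes precedence over the
-            # GOOGLE_API_USE_MTLS_ENDPOINT environment variable.
-            options = client_options_lib.ClientOptions(
-                api_endpoint="datastore.googleapis.com",
-            )
-            client = datastore_v1.DatastoreClient(client_options=options)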
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DatastoreClient._read_environment_variables() - self._client_cert_source = DatastoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DatastoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DatastoreTransport) - if transport_provided: - # transport is a DatastoreTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(DatastoreTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DatastoreClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def lookup(self, - request: Optional[Union[datastore.LookupRequest, dict]] = None, - *, - project_id: Optional[str] = None, - read_options: Optional[datastore.ReadOptions] = None, - keys: Optional[MutableSequence[entity.Key]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.LookupResponse: - r"""Looks up entities by key. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - def sample_lookup(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.LookupRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.lookup(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_v1.types.LookupRequest, dict]): - The request object. The request for - [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - project_id (str): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - read_options (google.cloud.datastore_v1.types.ReadOptions): - The options for this lookup request. - This corresponds to the ``read_options`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - keys (MutableSequence[google.cloud.datastore_v1.types.Key]): - Required. Keys of entities to look - up. - - This corresponds to the ``keys`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.LookupResponse: - The response for - [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, read_options, keys]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datastore.LookupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore.LookupRequest): - request = datastore.LookupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if read_options is not None: - request.read_options = read_options - if keys is not None: - request.keys = keys - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.lookup] - - header_params = {} - - if request.project_id: - header_params["project_id"] = request.project_id - - if request.database_id: - header_params["database_id"] = request.database_id - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
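-        # The LookupResponse splits results into ``found`` entities,
-        # ``missing`` keys, and ``deferred`` keys that were not looked up
-        # and may be retried in a subsequent request.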
- return response - - def run_query(self, - request: Optional[Union[datastore.RunQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.RunQueryResponse: - r"""Queries for entities. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - def sample_run_query(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.RunQueryRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.run_query(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]): - The request object. The request for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.RunQueryResponse: - The response for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RunQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore.RunQueryRequest): - request = datastore.RunQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_query] - - header_params = {} - - if request.project_id: - header_params["project_id"] = request.project_id - - if request.database_id: - header_params["database_id"] = request.database_id - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def run_aggregation_query(self, - request: Optional[Union[datastore.RunAggregationQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.RunAggregationQueryResponse: - r"""Runs an aggregation query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - def sample_run_aggregation_query(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.RunAggregationQueryRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.run_aggregation_query(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_v1.types.RunAggregationQueryRequest, dict]): - The request object. The request for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.RunAggregationQueryResponse: - The response for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RunAggregationQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore.RunAggregationQueryRequest): - request = datastore.RunAggregationQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.run_aggregation_query] - - header_params = {} - - if request.project_id: - header_params["project_id"] = request.project_id - - if request.database_id: - header_params["database_id"] = request.database_id - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def begin_transaction(self, - request: Optional[Union[datastore.BeginTransactionRequest, dict]] = None, - *, - project_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.BeginTransactionResponse: - r"""Begins a new transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - def sample_begin_transaction(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.BeginTransactionRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]): - The request object. The request for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - project_id (str): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.BeginTransactionResponse: - The response for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datastore.BeginTransactionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore.BeginTransactionRequest): - request = datastore.BeginTransactionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.begin_transaction] - - header_params = {} - - if request.project_id: - header_params["project_id"] = request.project_id - - if request.database_id: - header_params["database_id"] = request.database_id - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
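-        # The BeginTransactionResponse carries the opaque ``transaction``
-        # identifier that later commit and rollback calls refer to.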
- return response - - def commit(self, - request: Optional[Union[datastore.CommitRequest, dict]] = None, - *, - project_id: Optional[str] = None, - mode: Optional[datastore.CommitRequest.Mode] = None, - transaction: Optional[bytes] = None, - mutations: Optional[MutableSequence[datastore.Mutation]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.CommitResponse: - r"""Commits a transaction, optionally creating, deleting - or modifying some entities. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - def sample_commit(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.CommitRequest( - transaction=b'transaction_blob', - project_id="project_id_value", - ) - - # Make the request - response = client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_v1.types.CommitRequest, dict]): - The request object. The request for - [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - project_id (str): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mode (google.cloud.datastore_v1.types.CommitRequest.Mode): - The type of commit to perform. Defaults to - ``TRANSACTIONAL``. - - This corresponds to the ``mode`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (bytes): - The identifier of the transaction associated with the - commit. A transaction identifier is returned by a call - to - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutations (MutableSequence[google.cloud.datastore_v1.types.Mutation]): - The mutations to perform. - - When mode is ``TRANSACTIONAL``, mutations affecting a - single entity are applied in order. The following - sequences of mutations affecting a single entity are not - permitted in a single ``Commit`` request: - - - ``insert`` followed by ``insert`` - - ``update`` followed by ``insert`` - - ``upsert`` followed by ``insert`` - - ``delete`` followed by ``update`` - - When mode is ``NON_TRANSACTIONAL``, no two mutations may - affect a single entity. - - This corresponds to the ``mutations`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.CommitResponse: - The response for - [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - - """ - # Create or coerce a protobuf request object. 
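-        # As elsewhere in this client, the flattened arguments (project_id,
-        # mode, transaction, mutations) are a convenience layer over the
-        # request proto and are therefore mutually exclusive with ``request``.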
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, mode, transaction, mutations]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datastore.CommitRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore.CommitRequest): - request = datastore.CommitRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if mode is not None: - request.mode = mode - if transaction is not None: - request.transaction = transaction - if mutations is not None: - request.mutations = mutations - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.commit] - - header_params = {} - - if request.project_id: - header_params["project_id"] = request.project_id - - if request.database_id: - header_params["database_id"] = request.database_id - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rollback(self, - request: Optional[Union[datastore.RollbackRequest, dict]] = None, - *, - project_id: Optional[str] = None, - transaction: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.RollbackResponse: - r"""Rolls back a transaction. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - def sample_rollback(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.RollbackRequest( - project_id="project_id_value", - transaction=b'transaction_blob', - ) - - # Make the request - response = client.rollback(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_v1.types.RollbackRequest, dict]): - The request object. The request for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - project_id (str): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (bytes): - Required. The transaction identifier, returned by a call - to - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. 
- - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.RollbackResponse: - The response for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - (an empty message). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, transaction]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datastore.RollbackRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore.RollbackRequest): - request = datastore.RollbackRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if transaction is not None: - request.transaction = transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rollback] - - header_params = {} - - if request.project_id: - header_params["project_id"] = request.project_id - - if request.database_id: - header_params["database_id"] = request.database_id - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def allocate_ids(self, - request: Optional[Union[datastore.AllocateIdsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - keys: Optional[MutableSequence[entity.Key]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.AllocateIdsResponse: - r"""Allocates IDs for the given keys, which is useful for - referencing an entity before it is inserted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - def sample_allocate_ids(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.AllocateIdsRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.allocate_ids(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]): - The request object. The request for - [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. - project_id (str): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - keys (MutableSequence[google.cloud.datastore_v1.types.Key]): - Required. A list of keys with - incomplete key paths for which to - allocate IDs. No key may be - reserved/read-only. - - This corresponds to the ``keys`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.AllocateIdsResponse: - The response for - [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datastore.AllocateIdsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore.AllocateIdsRequest): - request = datastore.AllocateIdsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if keys is not None: - request.keys = keys - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.allocate_ids] - - header_params = {} - - if request.project_id: - header_params["project_id"] = request.project_id - - if request.database_id: - header_params["database_id"] = request.database_id - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
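-        # The AllocateIdsResponse returns the request keys in order, with
-        # each incomplete key path completed by a newly allocated ID.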
- return response - - def reserve_ids(self, - request: Optional[Union[datastore.ReserveIdsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - keys: Optional[MutableSequence[entity.Key]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datastore.ReserveIdsResponse: - r"""Prevents the supplied keys' IDs from being - auto-allocated by Cloud Datastore. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_v1 - - def sample_reserve_ids(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.ReserveIdsRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.reserve_ids(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]): - The request object. The request for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - project_id (str): - Required. The ID of the project - against which to make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - keys (MutableSequence[google.cloud.datastore_v1.types.Key]): - Required. A list of keys with - complete key paths whose numeric IDs - should not be auto-allocated. - - This corresponds to the ``keys`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_v1.types.ReserveIdsResponse: - The response for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, keys]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datastore.ReserveIdsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore.ReserveIdsRequest): - request = datastore.ReserveIdsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if keys is not None: - request.keys = keys - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
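-        # The wrapped methods were precomputed by the transport in
-        # _prep_wrapped_messages, which supplies the per-RPC retry and
-        # timeout defaults.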
- rpc = self._transport._wrapped_methods[self._transport.reserve_ids] - - header_params = {} - - if request.project_id: - header_params["project_id"] = request.project_id - - if request.database_id: - header_params["database_id"] = request.database_id - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DatastoreClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DatastoreClient", -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/__init__.py deleted file mode 100644 index 1f92eea5..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DatastoreTransport -from .grpc import DatastoreGrpcTransport -from .grpc_asyncio import DatastoreGrpcAsyncIOTransport -from .rest import DatastoreRestTransport -from .rest import DatastoreRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreTransport]] -_transport_registry['grpc'] = DatastoreGrpcTransport -_transport_registry['grpc_asyncio'] = DatastoreGrpcAsyncIOTransport -_transport_registry['rest'] = DatastoreRestTransport - -__all__ = ( - 'DatastoreTransport', - 'DatastoreGrpcTransport', - 'DatastoreGrpcAsyncIOTransport', - 'DatastoreRestTransport', - 'DatastoreRestInterceptor', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/base.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/base.py deleted file mode 100644 index 84aabcee..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/base.py +++ /dev/null @@ -1,316 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.datastore_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.datastore_v1.types import datastore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DatastoreTransport(abc.ABC): - """Abstract transport class for Datastore.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', - ) - - DEFAULT_HOST: str = 'datastore.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'datastore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
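-        # Resolution order: ``credentials`` and ``credentials_file`` are
-        # mutually exclusive; a credentials file is loaded if given, and
-        # application default credentials (ADC) are used when neither is
-        # supplied.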
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                credentials_file,
-                **scopes_kwargs,
-                quota_project_id=quota_project_id
-            )
-        elif credentials is None:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply the audience if the user passed a credentials file.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use a self-signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    @property
-    def host(self):
-        return self._host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
-        self._wrapped_methods = {
-            self.lookup: gapic_v1.method.wrap_method(
-                self.lookup,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=60.0,
-                ),
-                default_timeout=60.0,
-                client_info=client_info,
-            ),
-            self.run_query: gapic_v1.method.wrap_method(
-                self.run_query,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=60.0,
-                ),
-                default_timeout=60.0,
-                client_info=client_info,
-            ),
-            self.run_aggregation_query: gapic_v1.method.wrap_method(
-                self.run_aggregation_query,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=60.0,
-                ),
-                default_timeout=60.0,
-                client_info=client_info,
-            ),
-            self.begin_transaction: gapic_v1.method.wrap_method(
-                self.begin_transaction,
-                default_timeout=60.0,
-                client_info=client_info,
-            ),
-            self.commit: gapic_v1.method.wrap_method(
-                self.commit,
-                default_timeout=60.0,
-                client_info=client_info,
-            ),
-            self.rollback: gapic_v1.method.wrap_method(
-                self.rollback,
-                default_timeout=60.0,
-                client_info=client_info,
-            ),
-            self.allocate_ids: gapic_v1.method.wrap_method(
-                self.allocate_ids,
-                default_timeout=60.0,
-                client_info=client_info,
-            ),
-            self.reserve_ids: gapic_v1.method.wrap_method(
-                self.reserve_ids,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=60.0,
-                ),
-                default_timeout=60.0,
-                client_info=client_info,
-            ),
-        }
-
-    def close(self):
-        """Closes resources associated with the transport.
-
-        .. warning::
-            Only call this method if the transport is NOT shared
-            with other clients, as this may cause errors in other clients!
- """ - raise NotImplementedError() - - @property - def lookup(self) -> Callable[ - [datastore.LookupRequest], - Union[ - datastore.LookupResponse, - Awaitable[datastore.LookupResponse] - ]]: - raise NotImplementedError() - - @property - def run_query(self) -> Callable[ - [datastore.RunQueryRequest], - Union[ - datastore.RunQueryResponse, - Awaitable[datastore.RunQueryResponse] - ]]: - raise NotImplementedError() - - @property - def run_aggregation_query(self) -> Callable[ - [datastore.RunAggregationQueryRequest], - Union[ - datastore.RunAggregationQueryResponse, - Awaitable[datastore.RunAggregationQueryResponse] - ]]: - raise NotImplementedError() - - @property - def begin_transaction(self) -> Callable[ - [datastore.BeginTransactionRequest], - Union[ - datastore.BeginTransactionResponse, - Awaitable[datastore.BeginTransactionResponse] - ]]: - raise NotImplementedError() - - @property - def commit(self) -> Callable[ - [datastore.CommitRequest], - Union[ - datastore.CommitResponse, - Awaitable[datastore.CommitResponse] - ]]: - raise NotImplementedError() - - @property - def rollback(self) -> Callable[ - [datastore.RollbackRequest], - Union[ - datastore.RollbackResponse, - Awaitable[datastore.RollbackResponse] - ]]: - raise NotImplementedError() - - @property - def allocate_ids(self) -> Callable[ - [datastore.AllocateIdsRequest], - Union[ - datastore.AllocateIdsResponse, - Awaitable[datastore.AllocateIdsResponse] - ]]: - raise NotImplementedError() - - @property - def reserve_ids(self) -> Callable[ - [datastore.ReserveIdsRequest], - Union[ - datastore.ReserveIdsResponse, - Awaitable[datastore.ReserveIdsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DatastoreTransport', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/grpc.py deleted file mode 100644 index f8680a71..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ /dev/null @@ -1,528 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-
-from google.cloud.datastore_v1.types import datastore
-from google.longrunning import operations_pb2  # type: ignore
-from .base import DatastoreTransport, DEFAULT_CLIENT_INFO
-
-
-class DatastoreGrpcTransport(DatastoreTransport):
-    """gRPC backend transport for Datastore.
-
-    Each RPC normalizes the partition IDs of the keys in its
-    input entities, and always returns entities with keys with
-    normalized partition IDs. This applies to all keys and entities,
-    including those in values, except keys with both an empty path
-    and an empty or unset partition ID. Normalization of input keys
-    sets the project ID (if not already set) to the project ID from
-    the request.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'datastore.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'datastore.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel.
It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datastore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. 
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def lookup(self) -> Callable[
-            [datastore.LookupRequest],
-            datastore.LookupResponse]:
-        r"""Return a callable for the lookup method over gRPC.
-
-        Looks up entities by key.
-
-        Returns:
-            Callable[[~.LookupRequest],
-                    ~.LookupResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'lookup' not in self._stubs:
-            self._stubs['lookup'] = self.grpc_channel.unary_unary(
-                '/google.datastore.v1.Datastore/Lookup',
-                request_serializer=datastore.LookupRequest.serialize,
-                response_deserializer=datastore.LookupResponse.deserialize,
-            )
-        return self._stubs['lookup']
-
-    @property
-    def run_query(self) -> Callable[
-            [datastore.RunQueryRequest],
-            datastore.RunQueryResponse]:
-        r"""Return a callable for the run query method over gRPC.
-
-        Queries for entities.
-
-        Returns:
-            Callable[[~.RunQueryRequest],
-                    ~.RunQueryResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'run_query' not in self._stubs:
-            self._stubs['run_query'] = self.grpc_channel.unary_unary(
-                '/google.datastore.v1.Datastore/RunQuery',
-                request_serializer=datastore.RunQueryRequest.serialize,
-                response_deserializer=datastore.RunQueryResponse.deserialize,
-            )
-        return self._stubs['run_query']
-
-    @property
-    def run_aggregation_query(self) -> Callable[
-            [datastore.RunAggregationQueryRequest],
-            datastore.RunAggregationQueryResponse]:
-        r"""Return a callable for the run aggregation query method over gRPC.
-
-        Runs an aggregation query.
-
-        Returns:
-            Callable[[~.RunAggregationQueryRequest],
-                    ~.RunAggregationQueryResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
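The ``create_channel`` classmethod above is also the hook for supplying a pre-built channel. A minimal sketch of wiring one in by hand, assuming application default credentials are available (the import path matches this staging package):

    from google.cloud.datastore_v1.services.datastore.transports.grpc import (
        DatastoreGrpcTransport,
    )

    # Build a channel with default credentials, then hand it to the
    # transport; when ``channel`` is passed, the transport skips its own
    # channel creation (and ignores ``credentials``), as the constructor
    # above shows.
    channel = DatastoreGrpcTransport.create_channel("datastore.googleapis.com")
    transport = DatastoreGrpcTransport(channel=channel)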
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_aggregation_query' not in self._stubs: - self._stubs['run_aggregation_query'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/RunAggregationQuery', - request_serializer=datastore.RunAggregationQueryRequest.serialize, - response_deserializer=datastore.RunAggregationQueryResponse.deserialize, - ) - return self._stubs['run_aggregation_query'] - - @property - def begin_transaction(self) -> Callable[ - [datastore.BeginTransactionRequest], - datastore.BeginTransactionResponse]: - r"""Return a callable for the begin transaction method over gRPC. - - Begins a new transaction. - - Returns: - Callable[[~.BeginTransactionRequest], - ~.BeginTransactionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/BeginTransaction', - request_serializer=datastore.BeginTransactionRequest.serialize, - response_deserializer=datastore.BeginTransactionResponse.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [datastore.CommitRequest], - datastore.CommitResponse]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction, optionally creating, deleting - or modifying some entities. - - Returns: - Callable[[~.CommitRequest], - ~.CommitResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'commit' not in self._stubs: - self._stubs['commit'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/Commit', - request_serializer=datastore.CommitRequest.serialize, - response_deserializer=datastore.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [datastore.RollbackRequest], - datastore.RollbackResponse]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction. - - Returns: - Callable[[~.RollbackRequest], - ~.RollbackResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/Rollback', - request_serializer=datastore.RollbackRequest.serialize, - response_deserializer=datastore.RollbackResponse.deserialize, - ) - return self._stubs['rollback'] - - @property - def allocate_ids(self) -> Callable[ - [datastore.AllocateIdsRequest], - datastore.AllocateIdsResponse]: - r"""Return a callable for the allocate ids method over gRPC. - - Allocates IDs for the given keys, which is useful for - referencing an entity before it is inserted. 
- - Returns: - Callable[[~.AllocateIdsRequest], - ~.AllocateIdsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'allocate_ids' not in self._stubs: - self._stubs['allocate_ids'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/AllocateIds', - request_serializer=datastore.AllocateIdsRequest.serialize, - response_deserializer=datastore.AllocateIdsResponse.deserialize, - ) - return self._stubs['allocate_ids'] - - @property - def reserve_ids(self) -> Callable[ - [datastore.ReserveIdsRequest], - datastore.ReserveIdsResponse]: - r"""Return a callable for the reserve ids method over gRPC. - - Prevents the supplied keys' IDs from being - auto-allocated by Cloud Datastore. - - Returns: - Callable[[~.ReserveIdsRequest], - ~.ReserveIdsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reserve_ids' not in self._stubs: - self._stubs['reserve_ids'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/ReserveIds', - request_serializer=datastore.ReserveIdsRequest.serialize, - response_deserializer=datastore.ReserveIdsResponse.deserialize, - ) - return self._stubs['reserve_ids'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
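Every stub property in this class follows the same memoization idiom: build the gRPC stub on first access, then reuse it from ``self._stubs``. Stripped of the Datastore specifics, the pattern reduces to the following sketch (``LazyStubs`` and the RPC path are hypothetical):

    class LazyStubs:
        def __init__(self, channel):
            self._channel = channel
            self._stubs = {}  # RPC name -> callable, filled on demand

        @property
        def echo(self):
            # Created once on first access, cached for every later call.
            if "echo" not in self._stubs:
                self._stubs["echo"] = self._channel.unary_unary(
                    "/example.v1.Example/Echo",  # hypothetical RPC path
                )
            return self._stubs["echo"]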
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DatastoreGrpcTransport', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py deleted file mode 100644 index c40f7287..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ /dev/null @@ -1,527 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.datastore_v1.types import datastore -from google.longrunning import operations_pb2 # type: ignore -from .base import DatastoreTransport, DEFAULT_CLIENT_INFO -from .grpc import DatastoreGrpcTransport - - -class DatastoreGrpcAsyncIOTransport(DatastoreTransport): - """gRPC AsyncIO backend transport for Datastore. - - Each RPC normalizes the partition IDs of the keys in its - input entities, and always returns entities with keys with - normalized partition IDs. This applies to all keys and entities, - including those in values, except keys with both an empty path - and an empty or unset partition ID. Normalization of input keys - sets the project ID (if not already set) to the project ID from - the request. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'datastore.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'datastore.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[aio.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'datastore.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def lookup(self) -> Callable[ - [datastore.LookupRequest], - Awaitable[datastore.LookupResponse]]: - r"""Return a callable for the lookup method over gRPC. - - Looks up entities by key. - - Returns: - Callable[[~.LookupRequest], - Awaitable[~.LookupResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup' not in self._stubs: - self._stubs['lookup'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/Lookup', - request_serializer=datastore.LookupRequest.serialize, - response_deserializer=datastore.LookupResponse.deserialize, - ) - return self._stubs['lookup'] - - @property - def run_query(self) -> Callable[ - [datastore.RunQueryRequest], - Awaitable[datastore.RunQueryResponse]]: - r"""Return a callable for the run query method over gRPC. - - Queries for entities. - - Returns: - Callable[[~.RunQueryRequest], - Awaitable[~.RunQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_query' not in self._stubs: - self._stubs['run_query'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/RunQuery', - request_serializer=datastore.RunQueryRequest.serialize, - response_deserializer=datastore.RunQueryResponse.deserialize, - ) - return self._stubs['run_query'] - - @property - def run_aggregation_query(self) -> Callable[ - [datastore.RunAggregationQueryRequest], - Awaitable[datastore.RunAggregationQueryResponse]]: - r"""Return a callable for the run aggregation query method over gRPC. - - Runs an aggregation query. - - Returns: - Callable[[~.RunAggregationQueryRequest], - Awaitable[~.RunAggregationQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'run_aggregation_query' not in self._stubs: - self._stubs['run_aggregation_query'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/RunAggregationQuery', - request_serializer=datastore.RunAggregationQueryRequest.serialize, - response_deserializer=datastore.RunAggregationQueryResponse.deserialize, - ) - return self._stubs['run_aggregation_query'] - - @property - def begin_transaction(self) -> Callable[ - [datastore.BeginTransactionRequest], - Awaitable[datastore.BeginTransactionResponse]]: - r"""Return a callable for the begin transaction method over gRPC. 
- - Begins a new transaction. - - Returns: - Callable[[~.BeginTransactionRequest], - Awaitable[~.BeginTransactionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/BeginTransaction', - request_serializer=datastore.BeginTransactionRequest.serialize, - response_deserializer=datastore.BeginTransactionResponse.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [datastore.CommitRequest], - Awaitable[datastore.CommitResponse]]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction, optionally creating, deleting - or modifying some entities. - - Returns: - Callable[[~.CommitRequest], - Awaitable[~.CommitResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'commit' not in self._stubs: - self._stubs['commit'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/Commit', - request_serializer=datastore.CommitRequest.serialize, - response_deserializer=datastore.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [datastore.RollbackRequest], - Awaitable[datastore.RollbackResponse]]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction. - - Returns: - Callable[[~.RollbackRequest], - Awaitable[~.RollbackResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/Rollback', - request_serializer=datastore.RollbackRequest.serialize, - response_deserializer=datastore.RollbackResponse.deserialize, - ) - return self._stubs['rollback'] - - @property - def allocate_ids(self) -> Callable[ - [datastore.AllocateIdsRequest], - Awaitable[datastore.AllocateIdsResponse]]: - r"""Return a callable for the allocate ids method over gRPC. - - Allocates IDs for the given keys, which is useful for - referencing an entity before it is inserted. - - Returns: - Callable[[~.AllocateIdsRequest], - Awaitable[~.AllocateIdsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'allocate_ids' not in self._stubs: - self._stubs['allocate_ids'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/AllocateIds', - request_serializer=datastore.AllocateIdsRequest.serialize, - response_deserializer=datastore.AllocateIdsResponse.deserialize, - ) - return self._stubs['allocate_ids'] - - @property - def reserve_ids(self) -> Callable[ - [datastore.ReserveIdsRequest], - Awaitable[datastore.ReserveIdsResponse]]: - r"""Return a callable for the reserve ids method over gRPC. - - Prevents the supplied keys' IDs from being - auto-allocated by Cloud Datastore. - - Returns: - Callable[[~.ReserveIdsRequest], - Awaitable[~.ReserveIdsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reserve_ids' not in self._stubs: - self._stubs['reserve_ids'] = self.grpc_channel.unary_unary( - '/google.datastore.v1.Datastore/ReserveIds', - request_serializer=datastore.ReserveIdsRequest.serialize, - response_deserializer=datastore.ReserveIdsResponse.deserialize, - ) - return self._stubs['reserve_ids'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'DatastoreGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/rest.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/rest.py deleted file mode 100644 index 0fd31b77..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/services/datastore/transports/rest.py +++ /dev/null @@ -1,1475 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.cloud.datastore_v1.types import datastore -from google.longrunning import operations_pb2 # type: ignore - -from .base import DatastoreTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class DatastoreRestInterceptor: - """Interceptor for Datastore. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DatastoreRestTransport. - - .. 
code-block:: python - class MyCustomDatastoreInterceptor(DatastoreRestInterceptor): - def pre_allocate_ids(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_allocate_ids(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_begin_transaction(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_begin_transaction(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_commit(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_commit(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_lookup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_lookup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_reserve_ids(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_reserve_ids(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_rollback(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_rollback(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_run_aggregation_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_aggregation_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_run_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_run_query(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DatastoreRestTransport(interceptor=MyCustomDatastoreInterceptor()) - client = DatastoreClient(transport=transport) - - - """ - def pre_allocate_ids(self, request: datastore.AllocateIdsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore.AllocateIdsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for allocate_ids - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_allocate_ids(self, response: datastore.AllocateIdsResponse) -> datastore.AllocateIdsResponse: - """Post-rpc interceptor for allocate_ids - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - def pre_begin_transaction(self, request: datastore.BeginTransactionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore.BeginTransactionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for begin_transaction - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_begin_transaction(self, response: datastore.BeginTransactionResponse) -> datastore.BeginTransactionResponse: - """Post-rpc interceptor for begin_transaction - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. 
- """ - return response - def pre_commit(self, request: datastore.CommitRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore.CommitRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for commit - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_commit(self, response: datastore.CommitResponse) -> datastore.CommitResponse: - """Post-rpc interceptor for commit - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - def pre_lookup(self, request: datastore.LookupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore.LookupRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for lookup - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_lookup(self, response: datastore.LookupResponse) -> datastore.LookupResponse: - """Post-rpc interceptor for lookup - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - def pre_reserve_ids(self, request: datastore.ReserveIdsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore.ReserveIdsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for reserve_ids - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_reserve_ids(self, response: datastore.ReserveIdsResponse) -> datastore.ReserveIdsResponse: - """Post-rpc interceptor for reserve_ids - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - def pre_rollback(self, request: datastore.RollbackRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore.RollbackRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for rollback - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_rollback(self, response: datastore.RollbackResponse) -> datastore.RollbackResponse: - """Post-rpc interceptor for rollback - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - def pre_run_aggregation_query(self, request: datastore.RunAggregationQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore.RunAggregationQueryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for run_aggregation_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_run_aggregation_query(self, response: datastore.RunAggregationQueryResponse) -> datastore.RunAggregationQueryResponse: - """Post-rpc interceptor for run_aggregation_query - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. 
- """ - return response - def pre_run_query(self, request: datastore.RunQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore.RunQueryRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for run_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_run_query(self, response: datastore.RunQueryResponse) -> datastore.RunQueryResponse: - """Post-rpc interceptor for run_query - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the Datastore server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the Datastore server but before - it is returned to user code. 
- """ - return response - - -@dataclasses.dataclass -class DatastoreRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DatastoreRestInterceptor - - -class DatastoreRestTransport(DatastoreTransport): - """REST backend transport for Datastore. - - Each RPC normalizes the partition IDs of the keys in its - input entities, and always returns entities with keys with - normalized partition IDs. This applies to all keys and entities, - including those in values, except keys with both an empty path - and an empty or unset partition ID. Normalization of input keys - sets the project ID (if not already set) to the project ID from - the request. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'datastore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DatastoreRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'datastore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or DatastoreRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _AllocateIds(DatastoreRestStub):
-        def __hash__(self):
-            return hash("AllocateIds")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        def __call__(self,
-                request: datastore.AllocateIdsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> datastore.AllocateIdsResponse:
-            r"""Call the allocate ids method over HTTP.
-
-            Args:
-                request (~.datastore.AllocateIdsRequest):
-                    The request object. The request for
-                [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds].
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.datastore.AllocateIdsResponse:
-                    The response for
-                [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds].
-
-            """
-
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/projects/{project_id}:allocateIds',
-                'body': '*',
-            },
-            ]
-            request, metadata = self._interceptor.pre_allocate_ids(request, metadata)
-            pb_request = datastore.AllocateIdsRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-
-            # Jsonify the request body
-
-            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=True
-            )
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-
-            # Jsonify the query params
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(self._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-
-            # Send the request
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(self._session, method)(
-                "{host}{uri}".format(host=self._host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore.AllocateIdsResponse() - pb_resp = datastore.AllocateIdsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_allocate_ids(resp) - return resp - - class _BeginTransaction(DatastoreRestStub): - def __hash__(self): - return hash("BeginTransaction") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore.BeginTransactionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> datastore.BeginTransactionResponse: - r"""Call the begin transaction method over HTTP. - - Args: - request (~.datastore.BeginTransactionRequest): - The request object. The request for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.datastore.BeginTransactionResponse: - The response for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:beginTransaction', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_begin_transaction(request, metadata) - pb_request = datastore.BeginTransactionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
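Every stub that follows repeats the request flow shown in ``_AllocateIds``: transcode the proto request onto its HTTP mapping, serialize the body to JSON, POST it through the authorized session, then parse the proto response. A hedged usage sketch against a local emulator (the endpoint, project ID, and kind are illustrative, not part of this change):

    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.datastore_v1 import DatastoreClient
    from google.cloud.datastore_v1.types import AllocateIdsRequest, Key

    client = DatastoreClient(
        transport="rest",
        credentials=AnonymousCredentials(),
        client_options=ClientOptions(api_endpoint="http://localhost:8081"),
    )
    # An incomplete key: the final path element has no id or name yet.
    incomplete = Key(partition_id={"project_id": "demo-project"}, path=[{"kind": "Task"}])
    response = client.allocate_ids(
        request=AllocateIdsRequest(project_id="demo-project", keys=[incomplete])
    )
    for key in response.keys:
        print(key.path[-1].id)  # each key comes back with an allocated ID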
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore.BeginTransactionResponse() - pb_resp = datastore.BeginTransactionResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_begin_transaction(resp) - return resp - - class _Commit(DatastoreRestStub): - def __hash__(self): - return hash("Commit") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore.CommitRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> datastore.CommitResponse: - r"""Call the commit method over HTTP. - - Args: - request (~.datastore.CommitRequest): - The request object. The request for - [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.datastore.CommitResponse: - The response for - [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:commit', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_commit(request, metadata) - pb_request = datastore.CommitRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore.CommitResponse() - pb_resp = datastore.CommitResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_commit(resp) - return resp - - class _Lookup(DatastoreRestStub): - def __hash__(self): - return hash("Lookup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore.LookupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> datastore.LookupResponse: - r"""Call the lookup method over HTTP. - - Args: - request (~.datastore.LookupRequest): - The request object. The request for - [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.datastore.LookupResponse: - The response for - [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:lookup', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_lookup(request, metadata) - pb_request = datastore.LookupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore.LookupResponse() - pb_resp = datastore.LookupResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_lookup(resp) - return resp - - class _ReserveIds(DatastoreRestStub): - def __hash__(self): - return hash("ReserveIds") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore.ReserveIdsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> datastore.ReserveIdsResponse: - r"""Call the reserve ids method over HTTP. - - Args: - request (~.datastore.ReserveIdsRequest): - The request object. The request for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.datastore.ReserveIdsResponse: - The response for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:reserveIds', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_reserve_ids(request, metadata) - pb_request = datastore.ReserveIdsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore.ReserveIdsResponse() - pb_resp = datastore.ReserveIdsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reserve_ids(resp) - return resp - - class _Rollback(DatastoreRestStub): - def __hash__(self): - return hash("Rollback") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore.RollbackRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> datastore.RollbackResponse: - r"""Call the rollback method over HTTP. - - Args: - request (~.datastore.RollbackRequest): - The request object. The request for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.datastore.RollbackResponse: - The response for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - (an empty message). - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:rollback', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_rollback(request, metadata) - pb_request = datastore.RollbackRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore.RollbackResponse() - pb_resp = datastore.RollbackResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_rollback(resp) - return resp - - class _RunAggregationQuery(DatastoreRestStub): - def __hash__(self): - return hash("RunAggregationQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore.RunAggregationQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> datastore.RunAggregationQueryResponse: - r"""Call the run aggregation query method over HTTP. - - Args: - request (~.datastore.RunAggregationQueryRequest): - The request object. The request for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.datastore.RunAggregationQueryResponse: - The response for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:runAggregationQuery', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_run_aggregation_query(request, metadata) - pb_request = datastore.RunAggregationQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore.RunAggregationQueryResponse() - pb_resp = datastore.RunAggregationQueryResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_run_aggregation_query(resp) - return resp - - class _RunQuery(DatastoreRestStub): - def __hash__(self): - return hash("RunQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore.RunQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> datastore.RunQueryResponse: - r"""Call the run query method over HTTP. - - Args: - request (~.datastore.RunQueryRequest): - The request object. The request for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.datastore.RunQueryResponse: - The response for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:runQuery', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_run_query(request, metadata) - pb_request = datastore.RunQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore.RunQueryResponse() - pb_resp = datastore.RunQueryResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_run_query(resp) - return resp - - @property - def allocate_ids(self) -> Callable[ - [datastore.AllocateIdsRequest], - datastore.AllocateIdsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._AllocateIds(self._session, self._host, self._interceptor) # type: ignore - - @property - def begin_transaction(self) -> Callable[ - [datastore.BeginTransactionRequest], - datastore.BeginTransactionResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore - - @property - def commit(self) -> Callable[ - [datastore.CommitRequest], - datastore.CommitResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Commit(self._session, self._host, self._interceptor) # type: ignore - - @property - def lookup(self) -> Callable[ - [datastore.LookupRequest], - datastore.LookupResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Lookup(self._session, self._host, self._interceptor) # type: ignore - - @property - def reserve_ids(self) -> Callable[ - [datastore.ReserveIdsRequest], - datastore.ReserveIdsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ReserveIds(self._session, self._host, self._interceptor) # type: ignore - - @property - def rollback(self) -> Callable[ - [datastore.RollbackRequest], - datastore.RollbackResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Rollback(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_aggregation_query(self) -> Callable[ - [datastore.RunAggregationQueryRequest], - datastore.RunAggregationQueryResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RunAggregationQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def run_query(self) -> Callable[ - [datastore.RunQueryRequest], - datastore.RunQueryResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RunQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(DatastoreRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/operations/*}:cancel', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(DatastoreRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(DatastoreRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(DatastoreRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/operations', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DatastoreRestTransport', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/__init__.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/__init__.py deleted file mode 100644 index 7b114724..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/__init__.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .aggregation_result import ( - AggregationResult, - AggregationResultBatch, -) -from .datastore import ( - AllocateIdsRequest, - AllocateIdsResponse, - BeginTransactionRequest, - BeginTransactionResponse, - CommitRequest, - CommitResponse, - LookupRequest, - LookupResponse, - Mutation, - MutationResult, - ReadOptions, - ReserveIdsRequest, - ReserveIdsResponse, - RollbackRequest, - RollbackResponse, - RunAggregationQueryRequest, - RunAggregationQueryResponse, - RunQueryRequest, - RunQueryResponse, - TransactionOptions, -) -from .entity import ( - ArrayValue, - Entity, - Key, - PartitionId, - Value, -) -from .query import ( - AggregationQuery, - CompositeFilter, - EntityResult, - Filter, - GqlQuery, - GqlQueryParameter, - KindExpression, - Projection, - PropertyFilter, - PropertyOrder, - PropertyReference, - Query, - QueryResultBatch, -) -from .query_profile import ( - ExecutionStats, - ExplainMetrics, - ExplainOptions, - PlanSummary, -) - -__all__ = ( - 'AggregationResult', - 'AggregationResultBatch', - 'AllocateIdsRequest', - 'AllocateIdsResponse', - 'BeginTransactionRequest', - 'BeginTransactionResponse', - 'CommitRequest', - 'CommitResponse', - 'LookupRequest', - 'LookupResponse', - 'Mutation', - 'MutationResult', - 'ReadOptions', - 'ReserveIdsRequest', - 'ReserveIdsResponse', - 'RollbackRequest', - 'RollbackResponse', - 'RunAggregationQueryRequest', - 'RunAggregationQueryResponse', - 'RunQueryRequest', - 'RunQueryResponse', - 'TransactionOptions', - 'ArrayValue', - 'Entity', - 'Key', - 'PartitionId', - 'Value', - 'AggregationQuery', - 'CompositeFilter', - 'EntityResult', - 'Filter', - 'GqlQuery', - 'GqlQueryParameter', - 'KindExpression', - 'Projection', - 'PropertyFilter', - 'PropertyOrder', - 'PropertyReference', - 'Query', - 'QueryResultBatch', - 'ExecutionStats', - 'ExplainMetrics', - 'ExplainOptions', - 'PlanSummary', -) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/aggregation_result.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/aggregation_result.py deleted file mode 100644 index a2b7e6ae..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/aggregation_result.py +++ /dev/null 
@@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datastore_v1.types import entity -from google.cloud.datastore_v1.types import query -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.datastore.v1', - manifest={ - 'AggregationResult', - 'AggregationResultBatch', - }, -) - - -class AggregationResult(proto.Message): - r"""The result of a single bucket from a Datastore aggregation query. - - The keys of ``aggregate_properties`` are the same for all results in - an aggregation query, unlike entity queries which can have different - fields present for each result. - - Attributes: - aggregate_properties (MutableMapping[str, google.cloud.datastore_v1.types.Value]): - The result of the aggregation functions, ex: - ``COUNT(*) AS total_entities``. - - The key is the - [alias][google.datastore.v1.AggregationQuery.Aggregation.alias] - assigned to the aggregation function on input and the size - of this map equals the number of aggregation functions in - the query. - """ - - aggregate_properties: MutableMapping[str, entity.Value] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message=entity.Value, - ) - - -class AggregationResultBatch(proto.Message): - r"""A batch of aggregation results produced by an aggregation - query. - - Attributes: - aggregation_results (MutableSequence[google.cloud.datastore_v1.types.AggregationResult]): - The aggregation results for this batch. - more_results (google.cloud.datastore_v1.types.QueryResultBatch.MoreResultsType): - The state of the query after the current batch. Only - COUNT(*) aggregations are supported in the initial launch. - Therefore, expected result type is limited to - ``NO_MORE_RESULTS``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Read timestamp this batch was returned from. - - In a single transaction, subsequent query result - batches for the same query can have a greater - timestamp. Each batch's read timestamp is valid - for all preceding batches. 
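Because ``aggregate_properties`` is keyed by the alias assigned in the query, callers read results back by that alias. A small illustrative sketch (the alias and count value are hypothetical):

    from google.cloud.datastore_v1.types import AggregationResult, Value

    # Shaped like a server result for an aggregation aliased "total_entities".
    result = AggregationResult(
        aggregate_properties={"total_entities": Value(integer_value=42)}
    )
    assert result.aggregate_properties["total_entities"].integer_value == 42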
- """ - - aggregation_results: MutableSequence['AggregationResult'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AggregationResult', - ) - more_results: query.QueryResultBatch.MoreResultsType = proto.Field( - proto.ENUM, - number=2, - enum=query.QueryResultBatch.MoreResultsType, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/datastore.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/datastore.py deleted file mode 100644 index c5640f2f..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/datastore.py +++ /dev/null @@ -1,1027 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datastore_v1.types import aggregation_result -from google.cloud.datastore_v1.types import entity -from google.cloud.datastore_v1.types import query as gd_query -from google.cloud.datastore_v1.types import query_profile -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.datastore.v1', - manifest={ - 'LookupRequest', - 'LookupResponse', - 'RunQueryRequest', - 'RunQueryResponse', - 'RunAggregationQueryRequest', - 'RunAggregationQueryResponse', - 'BeginTransactionRequest', - 'BeginTransactionResponse', - 'RollbackRequest', - 'RollbackResponse', - 'CommitRequest', - 'CommitResponse', - 'AllocateIdsRequest', - 'AllocateIdsResponse', - 'ReserveIdsRequest', - 'ReserveIdsResponse', - 'Mutation', - 'MutationResult', - 'ReadOptions', - 'TransactionOptions', - }, -) - - -class LookupRequest(proto.Message): - r"""The request for - [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. - - Attributes: - project_id (str): - Required. The ID of the project against which - to make the request. - database_id (str): - The ID of the database against which to make - the request. - '(default)' is not allowed; please use empty - string '' to refer the default database. - read_options (google.cloud.datastore_v1.types.ReadOptions): - The options for this lookup request. - keys (MutableSequence[google.cloud.datastore_v1.types.Key]): - Required. Keys of entities to look up. - """ - - project_id: str = proto.Field( - proto.STRING, - number=8, - ) - database_id: str = proto.Field( - proto.STRING, - number=9, - ) - read_options: 'ReadOptions' = proto.Field( - proto.MESSAGE, - number=1, - message='ReadOptions', - ) - keys: MutableSequence[entity.Key] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=entity.Key, - ) - - -class LookupResponse(proto.Message): - r"""The response for - [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. 
- - Attributes: - found (MutableSequence[google.cloud.datastore_v1.types.EntityResult]): - Entities found as ``ResultType.FULL`` entities. The order of - results in this field is undefined and has no relation to - the order of the keys in the input. - missing (MutableSequence[google.cloud.datastore_v1.types.EntityResult]): - Entities not found as ``ResultType.KEY_ONLY`` entities. The - order of results in this field is undefined and has no - relation to the order of the keys in the input. - deferred (MutableSequence[google.cloud.datastore_v1.types.Key]): - A list of keys that were not looked up due to - resource constraints. The order of results in - this field is undefined and has no relation to - the order of the keys in the input. - transaction (bytes): - The identifier of the transaction that was started as part - of this Lookup request. - - Set only when - [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] - was set in - [LookupRequest.read_options][google.datastore.v1.LookupRequest.read_options]. - read_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which these entities were read or - found missing. - """ - - found: MutableSequence[gd_query.EntityResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gd_query.EntityResult, - ) - missing: MutableSequence[gd_query.EntityResult] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gd_query.EntityResult, - ) - deferred: MutableSequence[entity.Key] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=entity.Key, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=5, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - - -class RunQueryRequest(proto.Message): - r"""The request for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project_id (str): - Required. The ID of the project against which - to make the request. - database_id (str): - The ID of the database against which to make - the request. - '(default)' is not allowed; please use empty - string '' to refer the default database. - partition_id (google.cloud.datastore_v1.types.PartitionId): - Entities are partitioned into subsets, - identified by a partition ID. Queries are scoped - to a single partition. This partition ID is - normalized with the standard default context - partition ID. - read_options (google.cloud.datastore_v1.types.ReadOptions): - The options for this query. - query (google.cloud.datastore_v1.types.Query): - The query to run. - - This field is a member of `oneof`_ ``query_type``. - gql_query (google.cloud.datastore_v1.types.GqlQuery): - The GQL query to run. This query must be a - non-aggregation query. - - This field is a member of `oneof`_ ``query_type``. - explain_options (google.cloud.datastore_v1.types.ExplainOptions): - Optional. Explain options for the query. If - set, additional query statistics will be - returned. If not, only query results will be - returned. 
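Given the response semantics above, a lookup handler typically splits the three result lists: ``found`` carries full entities, ``missing`` carries key-only results, and ``deferred`` keys should simply be retried. A sketch of that split (the ``response`` argument is a hypothetical ``LookupResponse``):

    def split_lookup(response):
        # Entities that were found, as full ResultType.FULL results.
        entities = [r.entity for r in response.found]
        # Keys confirmed absent, returned as ResultType.KEY_ONLY results.
        missing_keys = [r.entity.key for r in response.missing]
        # Keys skipped due to resource constraints; retry these in a new request.
        retry_keys = list(response.deferred)
        return entities, missing_keys, retry_keys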
- """ - - project_id: str = proto.Field( - proto.STRING, - number=8, - ) - database_id: str = proto.Field( - proto.STRING, - number=9, - ) - partition_id: entity.PartitionId = proto.Field( - proto.MESSAGE, - number=2, - message=entity.PartitionId, - ) - read_options: 'ReadOptions' = proto.Field( - proto.MESSAGE, - number=1, - message='ReadOptions', - ) - query: gd_query.Query = proto.Field( - proto.MESSAGE, - number=3, - oneof='query_type', - message=gd_query.Query, - ) - gql_query: gd_query.GqlQuery = proto.Field( - proto.MESSAGE, - number=7, - oneof='query_type', - message=gd_query.GqlQuery, - ) - explain_options: query_profile.ExplainOptions = proto.Field( - proto.MESSAGE, - number=12, - message=query_profile.ExplainOptions, - ) - - -class RunQueryResponse(proto.Message): - r"""The response for - [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. - - Attributes: - batch (google.cloud.datastore_v1.types.QueryResultBatch): - A batch of query results (always present). - query (google.cloud.datastore_v1.types.Query): - The parsed form of the ``GqlQuery`` from the request, if it - was set. - transaction (bytes): - The identifier of the transaction that was started as part - of this RunQuery request. - - Set only when - [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] - was set in - [RunQueryRequest.read_options][google.datastore.v1.RunQueryRequest.read_options]. - explain_metrics (google.cloud.datastore_v1.types.ExplainMetrics): - Query explain metrics. This is only present when the - [RunQueryRequest.explain_options][google.datastore.v1.RunQueryRequest.explain_options] - is provided, and it is sent only once with the last response - in the stream. - """ - - batch: gd_query.QueryResultBatch = proto.Field( - proto.MESSAGE, - number=1, - message=gd_query.QueryResultBatch, - ) - query: gd_query.Query = proto.Field( - proto.MESSAGE, - number=2, - message=gd_query.Query, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=5, - ) - explain_metrics: query_profile.ExplainMetrics = proto.Field( - proto.MESSAGE, - number=9, - message=query_profile.ExplainMetrics, - ) - - -class RunAggregationQueryRequest(proto.Message): - r"""The request for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project_id (str): - Required. The ID of the project against which - to make the request. - database_id (str): - The ID of the database against which to make - the request. - '(default)' is not allowed; please use empty - string '' to refer the default database. - partition_id (google.cloud.datastore_v1.types.PartitionId): - Entities are partitioned into subsets, - identified by a partition ID. Queries are scoped - to a single partition. This partition ID is - normalized with the standard default context - partition ID. - read_options (google.cloud.datastore_v1.types.ReadOptions): - The options for this query. - aggregation_query (google.cloud.datastore_v1.types.AggregationQuery): - The query to run. - - This field is a member of `oneof`_ ``query_type``. - gql_query (google.cloud.datastore_v1.types.GqlQuery): - The GQL query to run. This query must be an - aggregation query. 
- - This field is a member of `oneof`_ ``query_type``. - explain_options (google.cloud.datastore_v1.types.ExplainOptions): - Optional. Explain options for the query. If - set, additional query statistics will be - returned. If not, only query results will be - returned. - """ - - project_id: str = proto.Field( - proto.STRING, - number=8, - ) - database_id: str = proto.Field( - proto.STRING, - number=9, - ) - partition_id: entity.PartitionId = proto.Field( - proto.MESSAGE, - number=2, - message=entity.PartitionId, - ) - read_options: 'ReadOptions' = proto.Field( - proto.MESSAGE, - number=1, - message='ReadOptions', - ) - aggregation_query: gd_query.AggregationQuery = proto.Field( - proto.MESSAGE, - number=3, - oneof='query_type', - message=gd_query.AggregationQuery, - ) - gql_query: gd_query.GqlQuery = proto.Field( - proto.MESSAGE, - number=7, - oneof='query_type', - message=gd_query.GqlQuery, - ) - explain_options: query_profile.ExplainOptions = proto.Field( - proto.MESSAGE, - number=11, - message=query_profile.ExplainOptions, - ) - - -class RunAggregationQueryResponse(proto.Message): - r"""The response for - [Datastore.RunAggregationQuery][google.datastore.v1.Datastore.RunAggregationQuery]. - - Attributes: - batch (google.cloud.datastore_v1.types.AggregationResultBatch): - A batch of aggregation results. Always - present. - query (google.cloud.datastore_v1.types.AggregationQuery): - The parsed form of the ``GqlQuery`` from the request, if it - was set. - transaction (bytes): - The identifier of the transaction that was started as part - of this RunAggregationQuery request. - - Set only when - [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] - was set in - [RunAggregationQueryRequest.read_options][google.datastore.v1.RunAggregationQueryRequest.read_options]. - explain_metrics (google.cloud.datastore_v1.types.ExplainMetrics): - Query explain metrics. This is only present when the - [RunAggregationQueryRequest.explain_options][google.datastore.v1.RunAggregationQueryRequest.explain_options] - is provided, and it is sent only once with the last response - in the stream. - """ - - batch: aggregation_result.AggregationResultBatch = proto.Field( - proto.MESSAGE, - number=1, - message=aggregation_result.AggregationResultBatch, - ) - query: gd_query.AggregationQuery = proto.Field( - proto.MESSAGE, - number=2, - message=gd_query.AggregationQuery, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=5, - ) - explain_metrics: query_profile.ExplainMetrics = proto.Field( - proto.MESSAGE, - number=9, - message=query_profile.ExplainMetrics, - ) - - -class BeginTransactionRequest(proto.Message): - r"""The request for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - Attributes: - project_id (str): - Required. The ID of the project against which - to make the request. - database_id (str): - The ID of the database against which to make - the request. - '(default)' is not allowed; please use empty - string '' to refer the default database. - transaction_options (google.cloud.datastore_v1.types.TransactionOptions): - Options for a new transaction. 
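A ``COUNT(*)`` request shaped by the messages above; per the ``AggregationResultBatch`` docs earlier in this file, only COUNT aggregations were supported at the initial launch (project, kind, and alias are illustrative):

    from google.cloud.datastore_v1.types import (
        AggregationQuery,
        Query,
        RunAggregationQueryRequest,
    )

    request = RunAggregationQueryRequest(
        project_id="demo-project",
        aggregation_query=AggregationQuery(
            nested_query=Query(kind=[{"name": "Task"}]),
            aggregations=[
                AggregationQuery.Aggregation(
                    count=AggregationQuery.Aggregation.Count(),
                    alias="total_entities",
                )
            ],
        ),
    )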
- """ - - project_id: str = proto.Field( - proto.STRING, - number=8, - ) - database_id: str = proto.Field( - proto.STRING, - number=9, - ) - transaction_options: 'TransactionOptions' = proto.Field( - proto.MESSAGE, - number=10, - message='TransactionOptions', - ) - - -class BeginTransactionResponse(proto.Message): - r"""The response for - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - Attributes: - transaction (bytes): - The transaction identifier (always present). - """ - - transaction: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class RollbackRequest(proto.Message): - r"""The request for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. - - Attributes: - project_id (str): - Required. The ID of the project against which - to make the request. - database_id (str): - The ID of the database against which to make - the request. - '(default)' is not allowed; please use empty - string '' to refer the default database. - transaction (bytes): - Required. The transaction identifier, returned by a call to - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - """ - - project_id: str = proto.Field( - proto.STRING, - number=8, - ) - database_id: str = proto.Field( - proto.STRING, - number=9, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class RollbackResponse(proto.Message): - r"""The response for - [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. (an - empty message). - - """ - - -class CommitRequest(proto.Message): - r"""The request for - [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project_id (str): - Required. The ID of the project against which - to make the request. - database_id (str): - The ID of the database against which to make - the request. - '(default)' is not allowed; please use empty - string '' to refer the default database. - mode (google.cloud.datastore_v1.types.CommitRequest.Mode): - The type of commit to perform. Defaults to - ``TRANSACTIONAL``. - transaction (bytes): - The identifier of the transaction associated with the - commit. A transaction identifier is returned by a call to - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - This field is a member of `oneof`_ ``transaction_selector``. - single_use_transaction (google.cloud.datastore_v1.types.TransactionOptions): - Options for beginning a new transaction for this request. - The transaction is committed when the request completes. If - specified, - [TransactionOptions.mode][google.datastore.v1.TransactionOptions] - must be - [TransactionOptions.ReadWrite][google.datastore.v1.TransactionOptions.ReadWrite]. - - This field is a member of `oneof`_ ``transaction_selector``. - mutations (MutableSequence[google.cloud.datastore_v1.types.Mutation]): - The mutations to perform. - - When mode is ``TRANSACTIONAL``, mutations affecting a single - entity are applied in order. 
The following sequences of - mutations affecting a single entity are not permitted in a - single ``Commit`` request: - - - ``insert`` followed by ``insert`` - - ``update`` followed by ``insert`` - - ``upsert`` followed by ``insert`` - - ``delete`` followed by ``update`` - - When mode is ``NON_TRANSACTIONAL``, no two mutations may - affect a single entity. - """ - class Mode(proto.Enum): - r"""The modes available for commits. - - Values: - MODE_UNSPECIFIED (0): - Unspecified. This value must not be used. - TRANSACTIONAL (1): - Transactional: The mutations are either all applied, or none - are applied. Learn about transactions - `here <https://cloud.google.com/datastore/docs/concepts/transactions>`__. - NON_TRANSACTIONAL (2): - Non-transactional: The mutations may not - apply as all or none. - """ - MODE_UNSPECIFIED = 0 - TRANSACTIONAL = 1 - NON_TRANSACTIONAL = 2 - - project_id: str = proto.Field( - proto.STRING, - number=8, - ) - database_id: str = proto.Field( - proto.STRING, - number=9, - ) - mode: Mode = proto.Field( - proto.ENUM, - number=5, - enum=Mode, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=1, - oneof='transaction_selector', - ) - single_use_transaction: 'TransactionOptions' = proto.Field( - proto.MESSAGE, - number=10, - oneof='transaction_selector', - message='TransactionOptions', - ) - mutations: MutableSequence['Mutation'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Mutation', - ) - - -class CommitResponse(proto.Message): - r"""The response for - [Datastore.Commit][google.datastore.v1.Datastore.Commit]. - - Attributes: - mutation_results (MutableSequence[google.cloud.datastore_v1.types.MutationResult]): - The result of performing the mutations. - The i-th mutation result corresponds to the i-th - mutation in the request. - index_updates (int): - The number of index entries updated during - the commit, or zero if none were updated. - commit_time (google.protobuf.timestamp_pb2.Timestamp): - The transaction commit timestamp. Not set for - non-transactional commits. - """ - - mutation_results: MutableSequence['MutationResult'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='MutationResult', - ) - index_updates: int = proto.Field( - proto.INT32, - number=4, - ) - commit_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - - -class AllocateIdsRequest(proto.Message): - r"""The request for - [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. - - Attributes: - project_id (str): - Required. The ID of the project against which - to make the request. - database_id (str): - The ID of the database against which to make - the request. - '(default)' is not allowed; please use empty - string '' to refer the default database. - keys (MutableSequence[google.cloud.datastore_v1.types.Key]): - Required. A list of keys with incomplete key - paths for which to allocate IDs. No key may be - reserved/read-only. - """ - - project_id: str = proto.Field( - proto.STRING, - number=8, - ) - database_id: str = proto.Field( - proto.STRING, - number=9, - ) - keys: MutableSequence[entity.Key] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=entity.Key, - ) - - -class AllocateIdsResponse(proto.Message): - r"""The response for - [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. - - Attributes: - keys (MutableSequence[google.cloud.datastore_v1.types.Key]): - The keys specified in the request (in the - same order), each with its key path completed - with a newly allocated ID.
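A non-transactional commit sketch; note that under the ordering rules above, pairing e.g. an ``upsert`` with a later ``insert`` for the same entity in one request would be rejected (entity contents are illustrative):

    from google.cloud.datastore_v1.types import CommitRequest, Entity, Key, Mutation

    task = Entity(
        key=Key(path=[{"kind": "Task", "name": "sample-task"}]),
        properties={"done": {"boolean_value": False}},
    )
    request = CommitRequest(
        project_id="demo-project",
        mode=CommitRequest.Mode.NON_TRANSACTIONAL,
        mutations=[Mutation(upsert=task)],
    )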
- """ - - keys: MutableSequence[entity.Key] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=entity.Key, - ) - - -class ReserveIdsRequest(proto.Message): - r"""The request for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - - Attributes: - project_id (str): - Required. The ID of the project against which - to make the request. - database_id (str): - The ID of the database against which to make - the request. - '(default)' is not allowed; please use empty - string '' to refer the default database. - keys (MutableSequence[google.cloud.datastore_v1.types.Key]): - Required. A list of keys with complete key - paths whose numeric IDs should not be - auto-allocated. - """ - - project_id: str = proto.Field( - proto.STRING, - number=8, - ) - database_id: str = proto.Field( - proto.STRING, - number=9, - ) - keys: MutableSequence[entity.Key] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=entity.Key, - ) - - -class ReserveIdsResponse(proto.Message): - r"""The response for - [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. - - """ - - -class Mutation(proto.Message): - r"""A mutation to apply to an entity. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - insert (google.cloud.datastore_v1.types.Entity): - The entity to insert. The entity must not - already exist. The entity key's final path - element may be incomplete. - - This field is a member of `oneof`_ ``operation``. - update (google.cloud.datastore_v1.types.Entity): - The entity to update. The entity must already - exist. Must have a complete key path. - - This field is a member of `oneof`_ ``operation``. - upsert (google.cloud.datastore_v1.types.Entity): - The entity to upsert. The entity may or may - not already exist. The entity key's final path - element may be incomplete. - - This field is a member of `oneof`_ ``operation``. - delete (google.cloud.datastore_v1.types.Key): - The key of the entity to delete. The entity - may or may not already exist. Must have a - complete key path and must not be - reserved/read-only. - - This field is a member of `oneof`_ ``operation``. - base_version (int): - The version of the entity that this mutation - is being applied to. If this does not match the - current version on the server, the mutation - conflicts. - - This field is a member of `oneof`_ ``conflict_detection_strategy``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The update time of the entity that this - mutation is being applied to. If this does not - match the current update time on the server, the - mutation conflicts. - - This field is a member of `oneof`_ ``conflict_detection_strategy``. 
- """ - - insert: entity.Entity = proto.Field( - proto.MESSAGE, - number=4, - oneof='operation', - message=entity.Entity, - ) - update: entity.Entity = proto.Field( - proto.MESSAGE, - number=5, - oneof='operation', - message=entity.Entity, - ) - upsert: entity.Entity = proto.Field( - proto.MESSAGE, - number=6, - oneof='operation', - message=entity.Entity, - ) - delete: entity.Key = proto.Field( - proto.MESSAGE, - number=7, - oneof='operation', - message=entity.Key, - ) - base_version: int = proto.Field( - proto.INT64, - number=8, - oneof='conflict_detection_strategy', - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - oneof='conflict_detection_strategy', - message=timestamp_pb2.Timestamp, - ) - - -class MutationResult(proto.Message): - r"""The result of applying a mutation. - - Attributes: - key (google.cloud.datastore_v1.types.Key): - The automatically allocated key. - Set only when the mutation allocated a key. - version (int): - The version of the entity on the server after - processing the mutation. If the mutation doesn't - change anything on the server, then the version - will be the version of the current entity or, if - no entity is present, a version that is strictly - greater than the version of any previous entity - and less than the version of any possible future - entity. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The create time of the entity. This field - will not be set after a 'delete'. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The update time of the entity on the server - after processing the mutation. If the mutation - doesn't change anything on the server, then the - timestamp will be the update timestamp of the - current entity. This field will not be set after - a 'delete'. - conflict_detected (bool): - Whether a conflict was detected for this - mutation. Always false when a conflict detection - strategy field is not set in the mutation. - """ - - key: entity.Key = proto.Field( - proto.MESSAGE, - number=3, - message=entity.Key, - ) - version: int = proto.Field( - proto.INT64, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - conflict_detected: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class ReadOptions(proto.Message): - r"""The options shared by read requests. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_consistency (google.cloud.datastore_v1.types.ReadOptions.ReadConsistency): - The non-transactional read consistency to - use. - - This field is a member of `oneof`_ ``consistency_type``. - transaction (bytes): - The identifier of the transaction in which to read. A - transaction identifier is returned by a call to - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - - This field is a member of `oneof`_ ``consistency_type``. - new_transaction (google.cloud.datastore_v1.types.TransactionOptions): - Options for beginning a new transaction for this request. 
- - The new transaction identifier will be returned in the - corresponding response as either - [LookupResponse.transaction][google.datastore.v1.LookupResponse.transaction] - or - [RunQueryResponse.transaction][google.datastore.v1.RunQueryResponse.transaction]. - - This field is a member of `oneof`_ ``consistency_type``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads entities as they were at the given - time. This value is only supported for Cloud - Firestore in Datastore mode. - - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_type``. - """ - class ReadConsistency(proto.Enum): - r"""The possible values for read consistencies. - - Values: - READ_CONSISTENCY_UNSPECIFIED (0): - Unspecified. This value must not be used. - STRONG (1): - Strong consistency. - EVENTUAL (2): - Eventual consistency. - """ - READ_CONSISTENCY_UNSPECIFIED = 0 - STRONG = 1 - EVENTUAL = 2 - - read_consistency: ReadConsistency = proto.Field( - proto.ENUM, - number=1, - oneof='consistency_type', - enum=ReadConsistency, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=2, - oneof='consistency_type', - ) - new_transaction: 'TransactionOptions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='consistency_type', - message='TransactionOptions', - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - oneof='consistency_type', - message=timestamp_pb2.Timestamp, - ) - - -class TransactionOptions(proto.Message): - r"""Options for beginning a new transaction. - - Transactions can be created explicitly with calls to - [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] - or implicitly by setting - [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] - in read requests. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_write (google.cloud.datastore_v1.types.TransactionOptions.ReadWrite): - The transaction should allow both reads and - writes. - - This field is a member of `oneof`_ ``mode``. - read_only (google.cloud.datastore_v1.types.TransactionOptions.ReadOnly): - The transaction should only allow reads. - - This field is a member of `oneof`_ ``mode``. - """ - - class ReadWrite(proto.Message): - r"""Options specific to read / write transactions. - - Attributes: - previous_transaction (bytes): - The transaction identifier of the transaction - being retried. - """ - - previous_transaction: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - class ReadOnly(proto.Message): - r"""Options specific to read-only transactions. - - Attributes: - read_time (google.protobuf.timestamp_pb2.Timestamp): - Reads entities at the given time. - - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. 
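A hedged sketch of beginning a read-only transaction pinned to a past timestamp, subject to the time-window constraints described above; proto-plus accepts a timezone-aware datetime for Timestamp fields, and "my-project" is a placeholder.

import datetime

from google.cloud import datastore_v1

client = datastore_v1.DatastoreClient()

# Pin all reads in the transaction to ten minutes ago.
read_time = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=10)

response = client.begin_transaction(
    request=datastore_v1.BeginTransactionRequest(
        project_id="my-project",
        transaction_options=datastore_v1.TransactionOptions(
            read_only=datastore_v1.TransactionOptions.ReadOnly(read_time=read_time),
        ),
    )
)
txn = response.transaction  # opaque bytes to pass later in ReadOptions.transaction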
- """ - - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - - read_write: ReadWrite = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ReadWrite, - ) - read_only: ReadOnly = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=ReadOnly, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/entity.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/entity.py deleted file mode 100644 index a0e05644..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/entity.py +++ /dev/null @@ -1,394 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import latlng_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.datastore.v1', - manifest={ - 'PartitionId', - 'Key', - 'ArrayValue', - 'Value', - 'Entity', - }, -) - - -class PartitionId(proto.Message): - r"""A partition ID identifies a grouping of entities. The grouping is - always by project and namespace, however the namespace ID may be - empty. - - A partition ID contains several dimensions: project ID and namespace - ID. - - Partition dimensions: - - - May be ``""``. - - Must be valid UTF-8 bytes. - - Must have values that match regex ``[A-Za-z\d\.\-_]{1,100}`` If - the value of any dimension matches regex ``__.*__``, the - partition is reserved/read-only. A reserved/read-only partition - ID is forbidden in certain documented contexts. - - Foreign partition IDs (in which the project ID does not match the - context project ID ) are discouraged. Reads and writes of foreign - partition IDs may fail if the project is not in an active state. - - Attributes: - project_id (str): - The ID of the project to which the entities - belong. - database_id (str): - If not empty, the ID of the database to which - the entities belong. - namespace_id (str): - If not empty, the ID of the namespace to - which the entities belong. - """ - - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - database_id: str = proto.Field( - proto.STRING, - number=3, - ) - namespace_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class Key(proto.Message): - r"""A unique identifier for an entity. - If a key's partition ID or any of its path kinds or names are - reserved/read-only, the key is reserved/read-only. - A reserved/read-only key is forbidden in certain documented - contexts. - - Attributes: - partition_id (google.cloud.datastore_v1.types.PartitionId): - Entities are partitioned into subsets, - currently identified by a project ID and - namespace ID. Queries are scoped to a single - partition. 
- path (MutableSequence[google.cloud.datastore_v1.types.Key.PathElement]): - The entity path. An entity path consists of one or more - elements composed of a kind and a string or numerical - identifier, which identify entities. The first element - identifies a *root entity*, the second element identifies a - *child* of the root entity, the third element identifies a - child of the second entity, and so forth. The entities - identified by all prefixes of the path are called the - element's *ancestors*. - - An entity path is always fully complete: *all* of the - entity's ancestors are required to be in the path along with - the entity identifier itself. The only exception is that in - some documented cases, the identifier in the last path - element (for the entity) itself may be omitted. For example, - the last path element of the key of ``Mutation.insert`` may - have no identifier. - - A path can never be empty, and a path can have at most 100 - elements. - """ - - class PathElement(proto.Message): - r"""A (kind, ID/name) pair used to construct a key path. - - If either name or ID is set, the element is complete. If neither - is set, the element is incomplete. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - kind (str): - The kind of the entity. - - A kind matching regex ``__.*__`` is reserved/read-only. A - kind must not contain more than 1500 bytes when UTF-8 - encoded. Cannot be ``""``. - - Must be valid UTF-8 bytes. Legacy values that are not valid - UTF-8 are encoded as ``__bytes__`` where ```` is the - base-64 encoding of the bytes. - id (int): - The auto-allocated ID of the entity. - - Never equal to zero. Values less than zero are - discouraged and may not be supported in the - future. - - This field is a member of `oneof`_ ``id_type``. - name (str): - The name of the entity. - - A name matching regex ``__.*__`` is reserved/read-only. A - name must not be more than 1500 bytes when UTF-8 encoded. - Cannot be ``""``. - - Must be valid UTF-8 bytes. Legacy values that are not valid - UTF-8 are encoded as ``__bytes__`` where ```` is the - base-64 encoding of the bytes. - - This field is a member of `oneof`_ ``id_type``. - """ - - kind: str = proto.Field( - proto.STRING, - number=1, - ) - id: int = proto.Field( - proto.INT64, - number=2, - oneof='id_type', - ) - name: str = proto.Field( - proto.STRING, - number=3, - oneof='id_type', - ) - - partition_id: 'PartitionId' = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - path: MutableSequence[PathElement] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=PathElement, - ) - - -class ArrayValue(proto.Message): - r"""An array value. - - Attributes: - values (MutableSequence[google.cloud.datastore_v1.types.Value]): - Values in the array. The order of values in an array is - preserved as long as all values have identical settings for - 'exclude_from_indexes'. - """ - - values: MutableSequence['Value'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - - -class Value(proto.Message): - r"""A message that can hold any of the supported value types and - associated metadata. - - This message has `oneof`_ fields (mutually exclusive fields). 
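A sketch of a complete two-element path as described above: a root TaskList entity is the ancestor of a Task entity. The kinds, names, and numeric ID are hypothetical.

from google.cloud import datastore_v1

key = datastore_v1.Key(
    partition_id=datastore_v1.PartitionId(project_id="my-project"),
    path=[
        # Root entity first, then its child; all ancestors must be complete.
        datastore_v1.Key.PathElement(kind="TaskList", name="default"),
        # Only this final element may omit its identifier (for example,
        # in the key of a Mutation.insert).
        datastore_v1.Key.PathElement(kind="Task", id=5670283964358656),
    ],
)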
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - null_value (google.protobuf.struct_pb2.NullValue): - A null value. - - This field is a member of `oneof`_ ``value_type``. - boolean_value (bool): - A boolean value. - - This field is a member of `oneof`_ ``value_type``. - integer_value (int): - An integer value. - - This field is a member of `oneof`_ ``value_type``. - double_value (float): - A double value. - - This field is a member of `oneof`_ ``value_type``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - A timestamp value. - When stored in the Datastore, precise only to - microseconds; any additional precision is - rounded down. - - This field is a member of `oneof`_ ``value_type``. - key_value (google.cloud.datastore_v1.types.Key): - A key value. - - This field is a member of `oneof`_ ``value_type``. - string_value (str): - A UTF-8 encoded string value. When ``exclude_from_indexes`` - is false (it is indexed) , may have at most 1500 bytes. - Otherwise, may be set to at most 1,000,000 bytes. - - This field is a member of `oneof`_ ``value_type``. - blob_value (bytes): - A blob value. May have at most 1,000,000 bytes. When - ``exclude_from_indexes`` is false, may have at most 1500 - bytes. In JSON requests, must be base64-encoded. - - This field is a member of `oneof`_ ``value_type``. - geo_point_value (google.type.latlng_pb2.LatLng): - A geo point value representing a point on the - surface of Earth. - - This field is a member of `oneof`_ ``value_type``. - entity_value (google.cloud.datastore_v1.types.Entity): - An entity value. - - - May have no key. - - May have a key with an incomplete key path. - - May have a reserved/read-only key. - - This field is a member of `oneof`_ ``value_type``. - array_value (google.cloud.datastore_v1.types.ArrayValue): - An array value. Cannot contain another array value. A - ``Value`` instance that sets field ``array_value`` must not - set fields ``meaning`` or ``exclude_from_indexes``. - - This field is a member of `oneof`_ ``value_type``. - meaning (int): - The ``meaning`` field should only be populated for backwards - compatibility. - exclude_from_indexes (bool): - If the value should be excluded from all - indexes including those defined explicitly. 
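Two sketches of the size rules above: a string longer than the 1500-byte indexed limit must be excluded from indexes, and an array value carries indexing settings on its element Values rather than on itself.

from google.cloud import datastore_v1

description = datastore_v1.Value(
    string_value="x" * 10_000,  # > 1500 bytes, so it cannot be indexed
    exclude_from_indexes=True,
)
tags = datastore_v1.Value(
    # The Value holding array_value must not set meaning or
    # exclude_from_indexes; those settings belong on the elements.
    array_value=datastore_v1.ArrayValue(
        values=[
            datastore_v1.Value(string_value="fun"),
            datastore_v1.Value(string_value="urgent"),
        ]
    ),
)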
- """ - - null_value: struct_pb2.NullValue = proto.Field( - proto.ENUM, - number=11, - oneof='value_type', - enum=struct_pb2.NullValue, - ) - boolean_value: bool = proto.Field( - proto.BOOL, - number=1, - oneof='value_type', - ) - integer_value: int = proto.Field( - proto.INT64, - number=2, - oneof='value_type', - ) - double_value: float = proto.Field( - proto.DOUBLE, - number=3, - oneof='value_type', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - oneof='value_type', - message=timestamp_pb2.Timestamp, - ) - key_value: 'Key' = proto.Field( - proto.MESSAGE, - number=5, - oneof='value_type', - message='Key', - ) - string_value: str = proto.Field( - proto.STRING, - number=17, - oneof='value_type', - ) - blob_value: bytes = proto.Field( - proto.BYTES, - number=18, - oneof='value_type', - ) - geo_point_value: latlng_pb2.LatLng = proto.Field( - proto.MESSAGE, - number=8, - oneof='value_type', - message=latlng_pb2.LatLng, - ) - entity_value: 'Entity' = proto.Field( - proto.MESSAGE, - number=6, - oneof='value_type', - message='Entity', - ) - array_value: 'ArrayValue' = proto.Field( - proto.MESSAGE, - number=9, - oneof='value_type', - message='ArrayValue', - ) - meaning: int = proto.Field( - proto.INT32, - number=14, - ) - exclude_from_indexes: bool = proto.Field( - proto.BOOL, - number=19, - ) - - -class Entity(proto.Message): - r"""A Datastore data object. - - Must not exceed 1 MiB - 4 bytes. - - Attributes: - key (google.cloud.datastore_v1.types.Key): - The entity's key. - - An entity must have a key, unless otherwise documented (for - example, an entity in ``Value.entity_value`` may have no - key). An entity's kind is its key path's last element's - kind, or null if it has no key. - properties (MutableMapping[str, google.cloud.datastore_v1.types.Value]): - The entity's properties. The map's keys are property names. - A property name matching regex ``__.*__`` is reserved. A - reserved property name is forbidden in certain documented - contexts. The map keys, represented as UTF-8, must not - exceed 1,500 bytes and cannot be empty. - """ - - key: 'Key' = proto.Field( - proto.MESSAGE, - number=1, - message='Key', - ) - properties: MutableMapping[str, 'Value'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message='Value', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/query.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/query.py deleted file mode 100644 index c3f63873..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/query.py +++ /dev/null @@ -1,904 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datastore_v1.types import entity as gd_entity -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.datastore.v1', - manifest={ - 'EntityResult', - 'Query', - 'AggregationQuery', - 'KindExpression', - 'PropertyReference', - 'Projection', - 'PropertyOrder', - 'Filter', - 'CompositeFilter', - 'PropertyFilter', - 'GqlQuery', - 'GqlQueryParameter', - 'QueryResultBatch', - }, -) - - -class EntityResult(proto.Message): - r"""The result of fetching an entity from Datastore. - - Attributes: - entity (google.cloud.datastore_v1.types.Entity): - The resulting entity. - version (int): - The version of the entity, a strictly positive number that - monotonically increases with changes to the entity. - - This field is set for - [``FULL``][google.datastore.v1.EntityResult.ResultType.FULL] - entity results. - - For [missing][google.datastore.v1.LookupResponse.missing] - entities in ``LookupResponse``, this is the version of the - snapshot that was used to look up the entity, and it is - always set except for eventually consistent reads. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the entity was created. This field is set - for - [``FULL``][google.datastore.v1.EntityResult.ResultType.FULL] - entity results. If this entity is missing, this field will - not be set. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the entity was last changed. This field is - set for - [``FULL``][google.datastore.v1.EntityResult.ResultType.FULL] - entity results. If this entity is missing, this field will - not be set. - cursor (bytes): - A cursor that points to the position after the result - entity. Set only when the ``EntityResult`` is part of a - ``QueryResultBatch`` message. - """ - class ResultType(proto.Enum): - r"""Specifies what data the 'entity' field contains. A ``ResultType`` is - either implied (for example, in ``LookupResponse.missing`` from - ``datastore.proto``, it is always ``KEY_ONLY``) or specified by - context (for example, in message ``QueryResultBatch``, field - ``entity_result_type`` specifies a ``ResultType`` for all the values - in field ``entity_results``). - - Values: - RESULT_TYPE_UNSPECIFIED (0): - Unspecified. This value is never used. - FULL (1): - The key and properties. - PROJECTION (2): - A projected subset of properties. The entity - may have no key. - KEY_ONLY (3): - Only the key. - """ - RESULT_TYPE_UNSPECIFIED = 0 - FULL = 1 - PROJECTION = 2 - KEY_ONLY = 3 - - entity: gd_entity.Entity = proto.Field( - proto.MESSAGE, - number=1, - message=gd_entity.Entity, - ) - version: int = proto.Field( - proto.INT64, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - cursor: bytes = proto.Field( - proto.BYTES, - number=3, - ) - - -class Query(proto.Message): - r"""A query for entities. - - Attributes: - projection (MutableSequence[google.cloud.datastore_v1.types.Projection]): - The projection to return. Defaults to - returning all properties. - kind (MutableSequence[google.cloud.datastore_v1.types.KindExpression]): - The kinds to query (if empty, returns - entities of all kinds). 
Currently at most 1 kind - may be specified. - filter (google.cloud.datastore_v1.types.Filter): - The filter to apply. - order (MutableSequence[google.cloud.datastore_v1.types.PropertyOrder]): - The order to apply to the query results (if - empty, order is unspecified). - distinct_on (MutableSequence[google.cloud.datastore_v1.types.PropertyReference]): - The properties to make distinct. The query results will - contain the first result for each distinct combination of - values for the given properties (if empty, all results are - returned). - - Requires: - - - If ``order`` is specified, the set of distinct on - properties must appear before the non-distinct on - properties in ``order``. - start_cursor (bytes): - A starting point for the query results. Query cursors are - returned in query result batches and `can only be used to - continue the same - query `__. - end_cursor (bytes): - An ending point for the query results. Query cursors are - returned in query result batches and `can only be used to - limit the same - query `__. - offset (int): - The number of results to skip. Applies before - limit, but after all other constraints. - Optional. Must be >= 0 if specified. - limit (google.protobuf.wrappers_pb2.Int32Value): - The maximum number of results to return. - Applies after all other constraints. Optional. - Unspecified is interpreted as no limit. - Must be >= 0 if specified. - """ - - projection: MutableSequence['Projection'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Projection', - ) - kind: MutableSequence['KindExpression'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='KindExpression', - ) - filter: 'Filter' = proto.Field( - proto.MESSAGE, - number=4, - message='Filter', - ) - order: MutableSequence['PropertyOrder'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='PropertyOrder', - ) - distinct_on: MutableSequence['PropertyReference'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='PropertyReference', - ) - start_cursor: bytes = proto.Field( - proto.BYTES, - number=7, - ) - end_cursor: bytes = proto.Field( - proto.BYTES, - number=8, - ) - offset: int = proto.Field( - proto.INT32, - number=10, - ) - limit: wrappers_pb2.Int32Value = proto.Field( - proto.MESSAGE, - number=12, - message=wrappers_pb2.Int32Value, - ) - - -class AggregationQuery(proto.Message): - r"""Datastore query for running an aggregation over a - [Query][google.datastore.v1.Query]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - nested_query (google.cloud.datastore_v1.types.Query): - Nested query for aggregation - - This field is a member of `oneof`_ ``query_type``. - aggregations (MutableSequence[google.cloud.datastore_v1.types.AggregationQuery.Aggregation]): - Optional. Series of aggregations to apply over the results - of the ``nested_query``. - - Requires: - - - A minimum of one and maximum of five aggregations per - query. - """ - - class Aggregation(proto.Message): - r"""Defines an aggregation that produces a single result. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - count (google.cloud.datastore_v1.types.AggregationQuery.Aggregation.Count): - Count aggregator. 
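A sketch of assembling and running the query message described above (a single kind, an ordering, and offset applied before limit); "my-project", "Task", and "priority" are placeholders.

from google.cloud import datastore_v1

query = datastore_v1.Query(
    kind=[datastore_v1.KindExpression(name="Task")],  # at most one kind
    order=[
        datastore_v1.PropertyOrder(
            property=datastore_v1.PropertyReference(name="priority"),
            direction=datastore_v1.PropertyOrder.Direction.DESCENDING,
        )
    ],
    offset=10,  # results skipped before the limit is applied
    limit=50,   # Int32Value; leaving it unset means no limit
)
response = datastore_v1.DatastoreClient().run_query(
    request=datastore_v1.RunQueryRequest(project_id="my-project", query=query)
)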
- - This field is a member of `oneof`_ ``operator``. - sum (google.cloud.datastore_v1.types.AggregationQuery.Aggregation.Sum): - Sum aggregator. - - This field is a member of `oneof`_ ``operator``. - avg (google.cloud.datastore_v1.types.AggregationQuery.Aggregation.Avg): - Average aggregator. - - This field is a member of `oneof`_ ``operator``. - alias (str): - Optional. Optional name of the property to store the result - of the aggregation. - - If not provided, Datastore will pick a default name - following the format ``property_``. For - example: - - :: - - AGGREGATE - COUNT_UP_TO(1) AS count_up_to_1, - COUNT_UP_TO(2), - COUNT_UP_TO(3) AS count_up_to_3, - COUNT(*) - OVER ( - ... - ); - - becomes: - - :: - - AGGREGATE - COUNT_UP_TO(1) AS count_up_to_1, - COUNT_UP_TO(2) AS property_1, - COUNT_UP_TO(3) AS count_up_to_3, - COUNT(*) AS property_2 - OVER ( - ... - ); - - Requires: - - - Must be unique across all aggregation aliases. - - Conform to [entity property - name][google.datastore.v1.Entity.properties] limitations. - """ - - class Count(proto.Message): - r"""Count of entities that match the query. - - The ``COUNT(*)`` aggregation function operates on the entire entity - so it does not require a field reference. - - Attributes: - up_to (google.protobuf.wrappers_pb2.Int64Value): - Optional. Optional constraint on the maximum number of - entities to count. - - This provides a way to set an upper bound on the number of - entities to scan, limiting latency, and cost. - - Unspecified is interpreted as no bound. - - If a zero value is provided, a count result of zero should - always be expected. - - High-Level Example: - - :: - - AGGREGATE COUNT_UP_TO(1000) OVER ( SELECT * FROM k ); - - Requires: - - - Must be non-negative when present. - """ - - up_to: wrappers_pb2.Int64Value = proto.Field( - proto.MESSAGE, - number=1, - message=wrappers_pb2.Int64Value, - ) - - class Sum(proto.Message): - r"""Sum of the values of the requested property. - - - Only numeric values will be aggregated. All non-numeric values - including ``NULL`` are skipped. - - - If the aggregated values contain ``NaN``, returns ``NaN``. - Infinity math follows IEEE-754 standards. - - - If the aggregated value set is empty, returns 0. - - - Returns a 64-bit integer if all aggregated numbers are integers - and the sum result does not overflow. Otherwise, the result is - returned as a double. Note that even if all the aggregated values - are integers, the result is returned as a double if it cannot fit - within a 64-bit signed integer. When this occurs, the returned - value will lose precision. - - - When underflow occurs, floating-point aggregation is - non-deterministic. This means that running the same query - repeatedly without any changes to the underlying values could - produce slightly different results each time. In those cases, - values should be stored as integers over floating-point numbers. - - Attributes: - property (google.cloud.datastore_v1.types.PropertyReference): - The property to aggregate on. - """ - - property: 'PropertyReference' = proto.Field( - proto.MESSAGE, - number=1, - message='PropertyReference', - ) - - class Avg(proto.Message): - r"""Average of the values of the requested property. - - - Only numeric values will be aggregated. All non-numeric values - including ``NULL`` are skipped. - - - If the aggregated values contain ``NaN``, returns ``NaN``. - Infinity math follows IEEE-754 standards. - - - If the aggregated value set is empty, returns ``NULL``. - - - Always returns the result as a double. 
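Putting the aggregation pieces together, a hedged sketch of a bounded count over a nested query; the kind and alias are hypothetical, and proto-plus accepts a plain int for the Int64Value bound.

from google.cloud import datastore_v1

aggregation_query = datastore_v1.AggregationQuery(
    nested_query=datastore_v1.Query(
        kind=[datastore_v1.KindExpression(name="Task")],
    ),
    aggregations=[
        datastore_v1.AggregationQuery.Aggregation(
            # Scan at most 1000 entities; a zero bound always counts zero.
            count=datastore_v1.AggregationQuery.Aggregation.Count(up_to=1000),
            alias="task_count",  # omitted aliases get generated names, per above
        )
    ],
)
response = datastore_v1.DatastoreClient().run_aggregation_query(
    request=datastore_v1.RunAggregationQueryRequest(
        project_id="my-project",
        aggregation_query=aggregation_query,
    )
)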
- - Attributes: - property (google.cloud.datastore_v1.types.PropertyReference): - The property to aggregate on. - """ - - property: 'PropertyReference' = proto.Field( - proto.MESSAGE, - number=1, - message='PropertyReference', - ) - - count: 'AggregationQuery.Aggregation.Count' = proto.Field( - proto.MESSAGE, - number=1, - oneof='operator', - message='AggregationQuery.Aggregation.Count', - ) - sum: 'AggregationQuery.Aggregation.Sum' = proto.Field( - proto.MESSAGE, - number=2, - oneof='operator', - message='AggregationQuery.Aggregation.Sum', - ) - avg: 'AggregationQuery.Aggregation.Avg' = proto.Field( - proto.MESSAGE, - number=3, - oneof='operator', - message='AggregationQuery.Aggregation.Avg', - ) - alias: str = proto.Field( - proto.STRING, - number=7, - ) - - nested_query: 'Query' = proto.Field( - proto.MESSAGE, - number=1, - oneof='query_type', - message='Query', - ) - aggregations: MutableSequence[Aggregation] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Aggregation, - ) - - -class KindExpression(proto.Message): - r"""A representation of a kind. - - Attributes: - name (str): - The name of the kind. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class PropertyReference(proto.Message): - r"""A reference to a property relative to the kind expressions. - - Attributes: - name (str): - A reference to a property. - - Requires: - - - MUST be a dot-delimited (``.``) string of segments, where - each segment conforms to [entity property - name][google.datastore.v1.Entity.properties] limitations. - """ - - name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Projection(proto.Message): - r"""A representation of a property in a projection. - - Attributes: - property (google.cloud.datastore_v1.types.PropertyReference): - The property to project. - """ - - property: 'PropertyReference' = proto.Field( - proto.MESSAGE, - number=1, - message='PropertyReference', - ) - - -class PropertyOrder(proto.Message): - r"""The desired order for a specific property. - - Attributes: - property (google.cloud.datastore_v1.types.PropertyReference): - The property to order by. - direction (google.cloud.datastore_v1.types.PropertyOrder.Direction): - The direction to order by. Defaults to ``ASCENDING``. - """ - class Direction(proto.Enum): - r"""The sort direction. - - Values: - DIRECTION_UNSPECIFIED (0): - Unspecified. This value must not be used. - ASCENDING (1): - Ascending. - DESCENDING (2): - Descending. - """ - DIRECTION_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - property: 'PropertyReference' = proto.Field( - proto.MESSAGE, - number=1, - message='PropertyReference', - ) - direction: Direction = proto.Field( - proto.ENUM, - number=2, - enum=Direction, - ) - - -class Filter(proto.Message): - r"""A holder for any type of filter. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - composite_filter (google.cloud.datastore_v1.types.CompositeFilter): - A composite filter. - - This field is a member of `oneof`_ ``filter_type``. - property_filter (google.cloud.datastore_v1.types.PropertyFilter): - A filter on a property. - - This field is a member of `oneof`_ ``filter_type``. 
- """ - - composite_filter: 'CompositeFilter' = proto.Field( - proto.MESSAGE, - number=1, - oneof='filter_type', - message='CompositeFilter', - ) - property_filter: 'PropertyFilter' = proto.Field( - proto.MESSAGE, - number=2, - oneof='filter_type', - message='PropertyFilter', - ) - - -class CompositeFilter(proto.Message): - r"""A filter that merges multiple other filters using the given - operator. - - Attributes: - op (google.cloud.datastore_v1.types.CompositeFilter.Operator): - The operator for combining multiple filters. - filters (MutableSequence[google.cloud.datastore_v1.types.Filter]): - The list of filters to combine. - - Requires: - - - At least one filter is present. - """ - class Operator(proto.Enum): - r"""A composite filter operator. - - Values: - OPERATOR_UNSPECIFIED (0): - Unspecified. This value must not be used. - AND (1): - The results are required to satisfy each of - the combined filters. - OR (2): - Documents are required to satisfy at least - one of the combined filters. - """ - OPERATOR_UNSPECIFIED = 0 - AND = 1 - OR = 2 - - op: Operator = proto.Field( - proto.ENUM, - number=1, - enum=Operator, - ) - filters: MutableSequence['Filter'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Filter', - ) - - -class PropertyFilter(proto.Message): - r"""A filter on a specific property. - - Attributes: - property (google.cloud.datastore_v1.types.PropertyReference): - The property to filter by. - op (google.cloud.datastore_v1.types.PropertyFilter.Operator): - The operator to filter by. - value (google.cloud.datastore_v1.types.Value): - The value to compare the property to. - """ - class Operator(proto.Enum): - r"""A property filter operator. - - Values: - OPERATOR_UNSPECIFIED (0): - Unspecified. This value must not be used. - LESS_THAN (1): - The given ``property`` is less than the given ``value``. - - Requires: - - - That ``property`` comes first in ``order_by``. - LESS_THAN_OR_EQUAL (2): - The given ``property`` is less than or equal to the given - ``value``. - - Requires: - - - That ``property`` comes first in ``order_by``. - GREATER_THAN (3): - The given ``property`` is greater than the given ``value``. - - Requires: - - - That ``property`` comes first in ``order_by``. - GREATER_THAN_OR_EQUAL (4): - The given ``property`` is greater than or equal to the given - ``value``. - - Requires: - - - That ``property`` comes first in ``order_by``. - EQUAL (5): - The given ``property`` is equal to the given ``value``. - IN (6): - The given ``property`` is equal to at least one value in the - given array. - - Requires: - - - That ``value`` is a non-empty ``ArrayValue``, subject to - disjunction limits. - - No ``NOT_IN`` is in the same query. - NOT_EQUAL (9): - The given ``property`` is not equal to the given ``value``. - - Requires: - - - No other ``NOT_EQUAL`` or ``NOT_IN`` is in the same - query. - - That ``property`` comes first in the ``order_by``. - HAS_ANCESTOR (11): - Limit the result set to the given entity and its - descendants. - - Requires: - - - That ``value`` is an entity key. - - All evaluated disjunctions must have the same - ``HAS_ANCESTOR`` filter. - NOT_IN (13): - The value of the ``property`` is not in the given array. - - Requires: - - - That ``value`` is a non-empty ``ArrayValue`` with at most - 10 values. - - No other ``OR``, ``IN``, ``NOT_IN``, ``NOT_EQUAL`` is in - the same query. - - That ``field`` comes first in the ``order_by``. 
- """ - OPERATOR_UNSPECIFIED = 0 - LESS_THAN = 1 - LESS_THAN_OR_EQUAL = 2 - GREATER_THAN = 3 - GREATER_THAN_OR_EQUAL = 4 - EQUAL = 5 - IN = 6 - NOT_EQUAL = 9 - HAS_ANCESTOR = 11 - NOT_IN = 13 - - property: 'PropertyReference' = proto.Field( - proto.MESSAGE, - number=1, - message='PropertyReference', - ) - op: Operator = proto.Field( - proto.ENUM, - number=2, - enum=Operator, - ) - value: gd_entity.Value = proto.Field( - proto.MESSAGE, - number=3, - message=gd_entity.Value, - ) - - -class GqlQuery(proto.Message): - r"""A `GQL - query `__. - - Attributes: - query_string (str): - A string of the format described - `here `__. - allow_literals (bool): - When false, the query string must not contain any literals - and instead must bind all values. For example, - ``SELECT * FROM Kind WHERE a = 'string literal'`` is not - allowed, while ``SELECT * FROM Kind WHERE a = @value`` is. - named_bindings (MutableMapping[str, google.cloud.datastore_v1.types.GqlQueryParameter]): - For each non-reserved named binding site in the query - string, there must be a named parameter with that name, but - not necessarily the inverse. - - Key must match regex ``[A-Za-z_$][A-Za-z_$0-9]*``, must not - match regex ``__.*__``, and must not be ``""``. - positional_bindings (MutableSequence[google.cloud.datastore_v1.types.GqlQueryParameter]): - Numbered binding site @1 references the first numbered - parameter, effectively using 1-based indexing, rather than - the usual 0. - - For each binding site numbered i in ``query_string``, there - must be an i-th numbered parameter. The inverse must also be - true. - """ - - query_string: str = proto.Field( - proto.STRING, - number=1, - ) - allow_literals: bool = proto.Field( - proto.BOOL, - number=2, - ) - named_bindings: MutableMapping[str, 'GqlQueryParameter'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=5, - message='GqlQueryParameter', - ) - positional_bindings: MutableSequence['GqlQueryParameter'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='GqlQueryParameter', - ) - - -class GqlQueryParameter(proto.Message): - r"""A binding parameter for a GQL query. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - value (google.cloud.datastore_v1.types.Value): - A value parameter. - - This field is a member of `oneof`_ ``parameter_type``. - cursor (bytes): - A query cursor. Query cursors are returned in - query result batches. - - This field is a member of `oneof`_ ``parameter_type``. - """ - - value: gd_entity.Value = proto.Field( - proto.MESSAGE, - number=2, - oneof='parameter_type', - message=gd_entity.Value, - ) - cursor: bytes = proto.Field( - proto.BYTES, - number=3, - oneof='parameter_type', - ) - - -class QueryResultBatch(proto.Message): - r"""A batch of results produced by a query. - - Attributes: - skipped_results (int): - The number of results skipped, typically - because of an offset. - skipped_cursor (bytes): - A cursor that points to the position after the last skipped - result. Will be set when ``skipped_results`` != 0. - entity_result_type (google.cloud.datastore_v1.types.EntityResult.ResultType): - The result type for every entity in ``entity_results``. 
- entity_results (MutableSequence[google.cloud.datastore_v1.types.EntityResult]): - The results for this batch. - end_cursor (bytes): - A cursor that points to the position after - the last result in the batch. - more_results (google.cloud.datastore_v1.types.QueryResultBatch.MoreResultsType): - The state of the query after the current - batch. - snapshot_version (int): - The version number of the snapshot this batch was returned - from. This applies to the range of results from the query's - ``start_cursor`` (or the beginning of the query if no cursor - was given) to this batch's ``end_cursor`` (not the query's - ``end_cursor``). - - In a single transaction, subsequent query result batches for - the same query can have a greater snapshot version number. - Each batch's snapshot version is valid for all preceding - batches. The value will be zero for eventually consistent - queries. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Read timestamp this batch was returned from. This applies to - the range of results from the query's ``start_cursor`` (or - the beginning of the query if no cursor was given) to this - batch's ``end_cursor`` (not the query's ``end_cursor``). - - In a single transaction, subsequent query result batches for - the same query can have a greater timestamp. Each batch's - read timestamp is valid for all preceding batches. This - value will not be set for eventually consistent queries in - Cloud Datastore. - """ - class MoreResultsType(proto.Enum): - r"""The possible values for the ``more_results`` field. - - Values: - MORE_RESULTS_TYPE_UNSPECIFIED (0): - Unspecified. This value is never used. - NOT_FINISHED (1): - There may be additional batches to fetch from - this query. - MORE_RESULTS_AFTER_LIMIT (2): - The query is finished, but there may be more - results after the limit. - MORE_RESULTS_AFTER_CURSOR (4): - The query is finished, but there may be more - results after the end cursor. - NO_MORE_RESULTS (3): - The query is finished, and there are no more - results. - """ - MORE_RESULTS_TYPE_UNSPECIFIED = 0 - NOT_FINISHED = 1 - MORE_RESULTS_AFTER_LIMIT = 2 - MORE_RESULTS_AFTER_CURSOR = 4 - NO_MORE_RESULTS = 3 - - skipped_results: int = proto.Field( - proto.INT32, - number=6, - ) - skipped_cursor: bytes = proto.Field( - proto.BYTES, - number=3, - ) - entity_result_type: 'EntityResult.ResultType' = proto.Field( - proto.ENUM, - number=1, - enum='EntityResult.ResultType', - ) - entity_results: MutableSequence['EntityResult'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='EntityResult', - ) - end_cursor: bytes = proto.Field( - proto.BYTES, - number=4, - ) - more_results: MoreResultsType = proto.Field( - proto.ENUM, - number=5, - enum=MoreResultsType, - ) - snapshot_version: int = proto.Field( - proto.INT64, - number=7, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/query_profile.py b/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/query_profile.py deleted file mode 100644 index 5b220c1c..00000000 --- a/owl-bot-staging/datastore/v1/google/cloud/datastore_v1/types/query_profile.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
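The batch fields above drive pagination. A hedged sketch of draining a query by resuming from end_cursor until more_results leaves NOT_FINISHED; identifiers are placeholders.

from google.cloud import datastore_v1

client = datastore_v1.DatastoreClient()
query = datastore_v1.Query(
    kind=[datastore_v1.KindExpression(name="Task")],
    limit=100,
)
while True:
    response = client.run_query(
        request=datastore_v1.RunQueryRequest(project_id="my-project", query=query)
    )
    batch = response.batch
    for result in batch.entity_results:
        print(result.entity.key)
    if batch.more_results != datastore_v1.QueryResultBatch.MoreResultsType.NOT_FINISHED:
        break
    # Cursors from one batch may only be used to continue the same query.
    query.start_cursor = batch.end_cursor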
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.datastore.v1', - manifest={ - 'ExplainOptions', - 'ExplainMetrics', - 'PlanSummary', - 'ExecutionStats', - }, -) - - -class ExplainOptions(proto.Message): - r"""Explain options for the query. - - Attributes: - analyze (bool): - Optional. Whether to execute this query. - - When false (the default), the query will be - planned, returning only metrics from the - planning stages. - - When true, the query will be planned and - executed, returning the full query results along - with both planning and execution stage metrics. - """ - - analyze: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class ExplainMetrics(proto.Message): - r"""Explain metrics for the query. - - Attributes: - plan_summary (google.cloud.datastore_v1.types.PlanSummary): - Planning phase information for the query. - execution_stats (google.cloud.datastore_v1.types.ExecutionStats): - Aggregated stats from the execution of the query. Only - present when - [ExplainOptions.analyze][google.datastore.v1.ExplainOptions.analyze] - is set to true. - """ - - plan_summary: 'PlanSummary' = proto.Field( - proto.MESSAGE, - number=1, - message='PlanSummary', - ) - execution_stats: 'ExecutionStats' = proto.Field( - proto.MESSAGE, - number=2, - message='ExecutionStats', - ) - - -class PlanSummary(proto.Message): - r"""Planning phase information for the query. - - Attributes: - indexes_used (MutableSequence[google.protobuf.struct_pb2.Struct]): - The indexes selected for the query. For example: [ - {"query_scope": "Collection", "properties": "(foo ASC, - **name** ASC)"}, {"query_scope": "Collection", "properties": - "(bar ASC, **name** ASC)"} ] - """ - - indexes_used: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct_pb2.Struct, - ) - - -class ExecutionStats(proto.Message): - r"""Execution statistics for the query. - - Attributes: - results_returned (int): - Total number of results returned, including - documents, projections, aggregation results, - keys. - execution_duration (google.protobuf.duration_pb2.Duration): - Total time to execute the query in the - backend. - read_operations (int): - Total billable read operations. - debug_stats (google.protobuf.struct_pb2.Struct): - Debugging statistics from the execution of the query. Note - that the debugging stats are subject to change as Firestore - evolves. 
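A sketch of requesting these metrics, assuming (as in this API revision) that RunQueryRequest exposes an explain_options field and that the response carries explain_metrics.

from google.cloud import datastore_v1

client = datastore_v1.DatastoreClient()
response = client.run_query(
    request=datastore_v1.RunQueryRequest(
        project_id="my-project",
        query=datastore_v1.Query(kind=[datastore_v1.KindExpression(name="Task")]),
        # analyze=True plans *and* executes; False returns planning data only.
        explain_options=datastore_v1.ExplainOptions(analyze=True),
    )
)
metrics = response.explain_metrics
print(metrics.plan_summary.indexes_used)
print(metrics.execution_stats.read_operations)  # set only when analyze=True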
It could include: { "indexes_entries_scanned": - "1000", "documents_scanned": "20", "billing_details" : { - "documents_billable": "20", "index_entries_billable": - "1000", "min_query_cost": "0" } } - """ - - results_returned: int = proto.Field( - proto.INT64, - number=1, - ) - execution_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - read_operations: int = proto.Field( - proto.INT64, - number=4, - ) - debug_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=5, - message=struct_pb2.Struct, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/datastore/v1/mypy.ini b/owl-bot-staging/datastore/v1/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/datastore/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/datastore/v1/noxfile.py b/owl-bot-staging/datastore/v1/noxfile.py deleted file mode 100644 index 5fc92993..00000000 --- a/owl-bot-staging/datastore/v1/noxfile.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12" -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-datastore' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/datastore_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - -@nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): - """Run the unit test suite against pre-release versions of dependencies.""" - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. 
- with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/datastore_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_allocate_ids_async.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_allocate_ids_async.py deleted file mode 100644 index 3128b48c..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_allocate_ids_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AllocateIds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_AllocateIds_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -async def sample_allocate_ids(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.AllocateIdsRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.allocate_ids(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_AllocateIds_async] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_allocate_ids_sync.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_allocate_ids_sync.py deleted file mode 100644 index 0d15159b..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_allocate_ids_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AllocateIds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_AllocateIds_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -def sample_allocate_ids(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.AllocateIdsRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.allocate_ids(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_AllocateIds_sync] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_begin_transaction_async.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_begin_transaction_async.py deleted file mode 100644 index d0f17372..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_begin_transaction_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BeginTransaction -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_BeginTransaction_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -async def sample_begin_transaction(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.BeginTransactionRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.begin_transaction(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_BeginTransaction_async] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_begin_transaction_sync.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_begin_transaction_sync.py deleted file mode 100644 index 17d54139..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_begin_transaction_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BeginTransaction -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_BeginTransaction_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -def sample_begin_transaction(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.BeginTransactionRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.begin_transaction(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_BeginTransaction_sync] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_commit_async.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_commit_async.py deleted file mode 100644 index 67b4df77..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_commit_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Commit -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_Commit_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -async def sample_commit(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.CommitRequest( - transaction=b'transaction_blob', - project_id="project_id_value", - ) - - # Make the request - response = await client.commit(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_Commit_async] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_commit_sync.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_commit_sync.py deleted file mode 100644 index 6b0391ed..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_commit_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Commit -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_Commit_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -def sample_commit(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.CommitRequest( - transaction=b'transaction_blob', - project_id="project_id_value", - ) - - # Make the request - response = client.commit(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_Commit_sync] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_lookup_async.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_lookup_async.py deleted file mode 100644 index a80b8242..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_lookup_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Lookup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_Lookup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -async def sample_lookup(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.LookupRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.lookup(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_Lookup_async] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_lookup_sync.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_lookup_sync.py deleted file mode 100644 index 876b300d..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_lookup_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Lookup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_Lookup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -def sample_lookup(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.LookupRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.lookup(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_Lookup_sync] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_reserve_ids_async.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_reserve_ids_async.py deleted file mode 100644 index 8a9a3d52..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_reserve_ids_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReserveIds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_ReserveIds_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -async def sample_reserve_ids(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.ReserveIdsRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.reserve_ids(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_ReserveIds_async] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_reserve_ids_sync.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_reserve_ids_sync.py deleted file mode 100644 index d0332194..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_reserve_ids_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReserveIds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_ReserveIds_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -def sample_reserve_ids(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.ReserveIdsRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.reserve_ids(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_ReserveIds_sync] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_rollback_async.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_rollback_async.py deleted file mode 100644 index 11260494..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_rollback_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Rollback -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_Rollback_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -async def sample_rollback(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.RollbackRequest( - project_id="project_id_value", - transaction=b'transaction_blob', - ) - - # Make the request - response = await client.rollback(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_Rollback_async] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_rollback_sync.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_rollback_sync.py deleted file mode 100644 index a4ea4378..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_rollback_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Rollback -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_Rollback_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -def sample_rollback(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.RollbackRequest( - project_id="project_id_value", - transaction=b'transaction_blob', - ) - - # Make the request - response = client.rollback(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_Rollback_sync] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_aggregation_query_async.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_aggregation_query_async.py deleted file mode 100644 index 11f8f267..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_aggregation_query_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunAggregationQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_RunAggregationQuery_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -async def sample_run_aggregation_query(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.RunAggregationQueryRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.run_aggregation_query(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_RunAggregationQuery_async] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_aggregation_query_sync.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_aggregation_query_sync.py deleted file mode 100644 index ebccdbc5..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_aggregation_query_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunAggregationQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_RunAggregationQuery_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -def sample_run_aggregation_query(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.RunAggregationQueryRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.run_aggregation_query(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_RunAggregationQuery_sync] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_query_async.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_query_async.py deleted file mode 100644 index 178da220..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_query_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_RunQuery_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -async def sample_run_query(): - # Create a client - client = datastore_v1.DatastoreAsyncClient() - - # Initialize request argument(s) - request = datastore_v1.RunQueryRequest( - project_id="project_id_value", - ) - - # Make the request - response = await client.run_query(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_RunQuery_async] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_query_sync.py b/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_query_sync.py deleted file mode 100644 index 08896e2c..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/datastore_v1_generated_datastore_run_query_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RunQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_Datastore_RunQuery_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_v1 - - -def sample_run_query(): - # Create a client - client = datastore_v1.DatastoreClient() - - # Initialize request argument(s) - request = datastore_v1.RunQueryRequest( - project_id="project_id_value", - ) - - # Make the request - response = client.run_query(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_Datastore_RunQuery_sync] diff --git a/owl-bot-staging/datastore/v1/samples/generated_samples/snippet_metadata_google.datastore.v1.json b/owl-bot-staging/datastore/v1/samples/generated_samples/snippet_metadata_google.datastore.v1.json deleted file mode 100644 index 87839baa..00000000 --- a/owl-bot-staging/datastore/v1/samples/generated_samples/snippet_metadata_google.datastore.v1.json +++ /dev/null @@ -1,1351 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.datastore.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-datastore", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient", - "shortName": "DatastoreAsyncClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient.allocate_ids", - "method": { - "fullName": "google.datastore.v1.Datastore.AllocateIds", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "AllocateIds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.AllocateIdsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "keys", - "type": "MutableSequence[google.cloud.datastore_v1.types.Key]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.AllocateIdsResponse", - "shortName": "allocate_ids" - }, - "description": "Sample for AllocateIds", - "file": "datastore_v1_generated_datastore_allocate_ids_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_AllocateIds_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - 
"start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_allocate_ids_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreClient", - "shortName": "DatastoreClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreClient.allocate_ids", - "method": { - "fullName": "google.datastore.v1.Datastore.AllocateIds", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "AllocateIds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.AllocateIdsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "keys", - "type": "MutableSequence[google.cloud.datastore_v1.types.Key]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.AllocateIdsResponse", - "shortName": "allocate_ids" - }, - "description": "Sample for AllocateIds", - "file": "datastore_v1_generated_datastore_allocate_ids_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_AllocateIds_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_allocate_ids_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient", - "shortName": "DatastoreAsyncClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient.begin_transaction", - "method": { - "fullName": "google.datastore.v1.Datastore.BeginTransaction", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "BeginTransaction" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.BeginTransactionRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.BeginTransactionResponse", - "shortName": "begin_transaction" - }, - "description": "Sample for BeginTransaction", - "file": "datastore_v1_generated_datastore_begin_transaction_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_BeginTransaction_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, 
- "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_begin_transaction_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreClient", - "shortName": "DatastoreClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreClient.begin_transaction", - "method": { - "fullName": "google.datastore.v1.Datastore.BeginTransaction", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "BeginTransaction" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.BeginTransactionRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.BeginTransactionResponse", - "shortName": "begin_transaction" - }, - "description": "Sample for BeginTransaction", - "file": "datastore_v1_generated_datastore_begin_transaction_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_BeginTransaction_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_begin_transaction_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient", - "shortName": "DatastoreAsyncClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient.commit", - "method": { - "fullName": "google.datastore.v1.Datastore.Commit", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "Commit" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.CommitRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "mode", - "type": "google.cloud.datastore_v1.types.CommitRequest.Mode" - }, - { - "name": "transaction", - "type": "bytes" - }, - { - "name": "mutations", - "type": "MutableSequence[google.cloud.datastore_v1.types.Mutation]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.CommitResponse", - "shortName": "commit" - }, - "description": "Sample for Commit", - "file": "datastore_v1_generated_datastore_commit_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_Commit_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, 
- "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_commit_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreClient", - "shortName": "DatastoreClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreClient.commit", - "method": { - "fullName": "google.datastore.v1.Datastore.Commit", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "Commit" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.CommitRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "mode", - "type": "google.cloud.datastore_v1.types.CommitRequest.Mode" - }, - { - "name": "transaction", - "type": "bytes" - }, - { - "name": "mutations", - "type": "MutableSequence[google.cloud.datastore_v1.types.Mutation]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.CommitResponse", - "shortName": "commit" - }, - "description": "Sample for Commit", - "file": "datastore_v1_generated_datastore_commit_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_Commit_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_commit_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient", - "shortName": "DatastoreAsyncClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient.lookup", - "method": { - "fullName": "google.datastore.v1.Datastore.Lookup", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "Lookup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.LookupRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "read_options", - "type": "google.cloud.datastore_v1.types.ReadOptions" - }, - { - "name": "keys", - "type": "MutableSequence[google.cloud.datastore_v1.types.Key]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.LookupResponse", - "shortName": "lookup" - }, - "description": "Sample for Lookup", - "file": "datastore_v1_generated_datastore_lookup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_Lookup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_lookup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreClient", - "shortName": "DatastoreClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreClient.lookup", - "method": { - "fullName": "google.datastore.v1.Datastore.Lookup", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "Lookup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.LookupRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "read_options", - "type": "google.cloud.datastore_v1.types.ReadOptions" - }, - { - "name": "keys", - "type": "MutableSequence[google.cloud.datastore_v1.types.Key]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.LookupResponse", - "shortName": "lookup" - }, - "description": "Sample for Lookup", - "file": "datastore_v1_generated_datastore_lookup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_Lookup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_lookup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient", - "shortName": "DatastoreAsyncClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient.reserve_ids", - "method": { - "fullName": "google.datastore.v1.Datastore.ReserveIds", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "ReserveIds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.ReserveIdsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "keys", - "type": "MutableSequence[google.cloud.datastore_v1.types.Key]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.ReserveIdsResponse", - "shortName": "reserve_ids" - }, - "description": "Sample for ReserveIds", - "file": "datastore_v1_generated_datastore_reserve_ids_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_ReserveIds_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_reserve_ids_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreClient", - "shortName": "DatastoreClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreClient.reserve_ids", - "method": { - "fullName": "google.datastore.v1.Datastore.ReserveIds", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "ReserveIds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.ReserveIdsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "keys", - "type": "MutableSequence[google.cloud.datastore_v1.types.Key]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.ReserveIdsResponse", - "shortName": "reserve_ids" - }, - "description": "Sample for ReserveIds", - "file": "datastore_v1_generated_datastore_reserve_ids_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_ReserveIds_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_reserve_ids_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient", - "shortName": "DatastoreAsyncClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient.rollback", - "method": { - "fullName": "google.datastore.v1.Datastore.Rollback", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "Rollback" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.RollbackRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "transaction", - "type": "bytes" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.RollbackResponse", - "shortName": "rollback" - }, - "description": "Sample for Rollback", - "file": "datastore_v1_generated_datastore_rollback_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_Rollback_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_rollback_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreClient", - "shortName": "DatastoreClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreClient.rollback", - "method": { - "fullName": 
"google.datastore.v1.Datastore.Rollback", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "Rollback" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.RollbackRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "transaction", - "type": "bytes" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.RollbackResponse", - "shortName": "rollback" - }, - "description": "Sample for Rollback", - "file": "datastore_v1_generated_datastore_rollback_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_Rollback_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_rollback_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient", - "shortName": "DatastoreAsyncClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient.run_aggregation_query", - "method": { - "fullName": "google.datastore.v1.Datastore.RunAggregationQuery", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "RunAggregationQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.RunAggregationQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.RunAggregationQueryResponse", - "shortName": "run_aggregation_query" - }, - "description": "Sample for RunAggregationQuery", - "file": "datastore_v1_generated_datastore_run_aggregation_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_RunAggregationQuery_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_run_aggregation_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreClient", - "shortName": "DatastoreClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreClient.run_aggregation_query", - "method": { - "fullName": "google.datastore.v1.Datastore.RunAggregationQuery", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "RunAggregationQuery" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.datastore_v1.types.RunAggregationQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.RunAggregationQueryResponse", - "shortName": "run_aggregation_query" - }, - "description": "Sample for RunAggregationQuery", - "file": "datastore_v1_generated_datastore_run_aggregation_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_RunAggregationQuery_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_run_aggregation_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient", - "shortName": "DatastoreAsyncClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreAsyncClient.run_query", - "method": { - "fullName": "google.datastore.v1.Datastore.RunQuery", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "RunQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.RunQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.RunQueryResponse", - "shortName": "run_query" - }, - "description": "Sample for RunQuery", - "file": "datastore_v1_generated_datastore_run_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_RunQuery_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_run_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_v1.DatastoreClient", - "shortName": "DatastoreClient" - }, - "fullName": "google.cloud.datastore_v1.DatastoreClient.run_query", - "method": { - "fullName": "google.datastore.v1.Datastore.RunQuery", - "service": { - "fullName": "google.datastore.v1.Datastore", - "shortName": "Datastore" - }, - "shortName": "RunQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_v1.types.RunQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_v1.types.RunQueryResponse", - "shortName": "run_query" - }, - "description": "Sample for RunQuery", - "file": 
"datastore_v1_generated_datastore_run_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_Datastore_RunQuery_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_run_query_sync.py" - } - ] -} diff --git a/owl-bot-staging/datastore/v1/scripts/fixup_datastore_v1_keywords.py b/owl-bot-staging/datastore/v1/scripts/fixup_datastore_v1_keywords.py deleted file mode 100644 index f0406904..00000000 --- a/owl-bot-staging/datastore/v1/scripts/fixup_datastore_v1_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class datastoreCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'allocate_ids': ('project_id', 'keys', 'database_id', ), - 'begin_transaction': ('project_id', 'database_id', 'transaction_options', ), - 'commit': ('project_id', 'database_id', 'mode', 'transaction', 'single_use_transaction', 'mutations', ), - 'lookup': ('project_id', 'keys', 'database_id', 'read_options', ), - 'reserve_ids': ('project_id', 'keys', 'database_id', ), - 'rollback': ('project_id', 'transaction', 'database_id', ), - 'run_aggregation_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'aggregation_query', 'gql_query', 'explain_options', ), - 'run_query': ('project_id', 'database_id', 'partition_id', 'read_options', 'query', 'gql_query', 'explain_options', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=datastoreCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the datastore client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/datastore/v1/setup.py b/owl-bot-staging/datastore/v1/setup.py deleted file mode 100644 index a7a145ff..00000000 --- a/owl-bot-staging/datastore/v1/setup.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-datastore' - - -description = "Google Cloud Datastore API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/datastore/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastore" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended 
Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/datastore/v1/testing/constraints-3.10.txt b/owl-bot-staging/datastore/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore/v1/testing/constraints-3.11.txt b/owl-bot-staging/datastore/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore/v1/testing/constraints-3.12.txt b/owl-bot-staging/datastore/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore/v1/testing/constraints-3.7.txt b/owl-bot-staging/datastore/v1/testing/constraints-3.7.txt deleted file mode 100644 index b8a550c7..00000000 --- a/owl-bot-staging/datastore/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.19.5 diff --git a/owl-bot-staging/datastore/v1/testing/constraints-3.8.txt b/owl-bot-staging/datastore/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore/v1/testing/constraints-3.9.txt b/owl-bot-staging/datastore/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore/v1/tests/__init__.py b/owl-bot-staging/datastore/v1/tests/__init__.py deleted file mode 100644 index 7b3de311..00000000 --- a/owl-bot-staging/datastore/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore/v1/tests/unit/__init__.py b/owl-bot-staging/datastore/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de311..00000000 --- a/owl-bot-staging/datastore/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/datastore/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de311..00000000 --- a/owl-bot-staging/datastore/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore/v1/tests/unit/gapic/datastore_v1/__init__.py b/owl-bot-staging/datastore/v1/tests/unit/gapic/datastore_v1/__init__.py deleted file mode 100644 index 7b3de311..00000000 --- a/owl-bot-staging/datastore/v1/tests/unit/gapic/datastore_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore/v1/tests/unit/gapic/datastore_v1/test_datastore.py b/owl-bot-staging/datastore/v1/tests/unit/gapic/datastore_v1/test_datastore.py deleted file mode 100644 index ef5ecffe..00000000 --- a/owl-bot-staging/datastore/v1/tests/unit/gapic/datastore_v1/test_datastore.py +++ /dev/null @@ -1,5984 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.datastore_v1.services.datastore import DatastoreAsyncClient -from google.cloud.datastore_v1.services.datastore import DatastoreClient -from google.cloud.datastore_v1.services.datastore import transports -from google.cloud.datastore_v1.types import aggregation_result -from google.cloud.datastore_v1.types import datastore -from google.cloud.datastore_v1.types import entity -from google.cloud.datastore_v1.types import query -from google.cloud.datastore_v1.types import query_profile -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.type import latlng_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. 
-# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DatastoreClient._get_default_mtls_endpoint(None) is None - assert DatastoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DatastoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DatastoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DatastoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DatastoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DatastoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DatastoreClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DatastoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DatastoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DatastoreClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DatastoreClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DatastoreClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DatastoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DatastoreClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DatastoreClient._get_client_cert_source(None, False) is None - assert DatastoreClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DatastoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DatastoreClient._get_client_cert_source(None, True) is 
mock_default_cert_source - assert DatastoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DatastoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreClient)) -@mock.patch.object(DatastoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DatastoreClient._DEFAULT_UNIVERSE - default_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DatastoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DatastoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DatastoreClient.DEFAULT_MTLS_ENDPOINT - assert DatastoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DatastoreClient._get_api_endpoint(None, None, default_universe, "always") == DatastoreClient.DEFAULT_MTLS_ENDPOINT - assert DatastoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DatastoreClient.DEFAULT_MTLS_ENDPOINT - assert DatastoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DatastoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DatastoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DatastoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DatastoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DatastoreClient._get_universe_domain(None, None) == DatastoreClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DatastoreClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), - (DatastoreClient, transports.DatastoreRestTransport, "rest"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DatastoreClient, "grpc"), - (DatastoreAsyncClient, "grpc_asyncio"), - (DatastoreClient, "rest"), -]) -def test_datastore_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datastore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://datastore.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DatastoreGrpcTransport, "grpc"), - (transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DatastoreRestTransport, "rest"), -]) -def test_datastore_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DatastoreClient, "grpc"), - (DatastoreAsyncClient, "grpc_asyncio"), - (DatastoreClient, "rest"), -]) -def test_datastore_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datastore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://datastore.googleapis.com' - ) - - -def test_datastore_client_get_transport_class(): - transport = DatastoreClient.get_transport_class() - available_transports = [ - transports.DatastoreGrpcTransport, - transports.DatastoreRestTransport, - ] - assert transport in available_transports - - transport = DatastoreClient.get_transport_class("grpc") - assert transport == transports.DatastoreGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), - (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio"), - (DatastoreClient, transports.DatastoreRestTransport, "rest"), -]) -@mock.patch.object(DatastoreClient, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreClient)) -@mock.patch.object(DatastoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAsyncClient)) -def test_datastore_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DatastoreClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DatastoreClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", "true"), - (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", "false"), - (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DatastoreClient, transports.DatastoreRestTransport, "rest", "true"), - (DatastoreClient, transports.DatastoreRestTransport, "rest", "false"), -]) -@mock.patch.object(DatastoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreClient)) -@mock.patch.object(DatastoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_datastore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
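# A simplified sketch of the expectation each branch below computes (assuming
# GOOGLE_API_USE_MTLS_ENDPOINT="auto", as set by the mock.patch.dict decorator
# above; illustrative only):
#
#   use_mtls = use_client_cert_env == "true" and cert_source is not None
#   expected_host = (client.DEFAULT_MTLS_ENDPOINT if use_mtls
#                    else client._DEFAULT_ENDPOINT_TEMPLATE.format(
#                        UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE))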
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DatastoreClient, DatastoreAsyncClient -]) -@mock.patch.object(DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient)) -@mock.patch.object(DatastoreAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreAsyncClient)) -def test_datastore_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DatastoreClient, DatastoreAsyncClient -]) -@mock.patch.object(DatastoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreClient)) -@mock.patch.object(DatastoreAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAsyncClient)) -def test_datastore_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DatastoreClient._DEFAULT_UNIVERSE - default_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DatastoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
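# ("GDU" = the googleapis.com Default Universe. Throughout these tests the
#  endpoint resolution is, in sketch form:
#    host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
#        UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
#  which, assuming the usual generated template, yields
#  "datastore.googleapis.com".)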
- options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), - (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio"), - (DatastoreClient, transports.DatastoreRestTransport, "rest"), -]) -def test_datastore_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", grpc_helpers), - (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DatastoreClient, transports.DatastoreRestTransport, "rest", None), -]) -def test_datastore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_datastore_client_client_options_from_dict(): - with mock.patch('google.cloud.datastore_v1.services.datastore.transports.DatastoreGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DatastoreClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", grpc_helpers), - (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_datastore_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datastore.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - scopes=None, - default_host="datastore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.LookupRequest, - dict, -]) -def test_lookup(request_type, transport: str = 'grpc'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datastore.LookupResponse( - transaction=b'transaction_blob', - ) - response = client.lookup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datastore.LookupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.LookupResponse) - assert response.transaction == b'transaction_blob' - - -def test_lookup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup), - '__call__') as call: - client.lookup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() - - -def test_lookup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datastore.LookupRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lookup), - '__call__') as call: - client.lookup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - -@pytest.mark.asyncio -async def test_lookup_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.LookupResponse( - transaction=b'transaction_blob', - )) - response = await client.lookup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.LookupRequest() - -@pytest.mark.asyncio -async def test_lookup_async(transport: str = 'grpc_asyncio', request_type=datastore.LookupRequest): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datastore.LookupResponse( - transaction=b'transaction_blob', - )) - response = await client.lookup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datastore.LookupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.LookupResponse) - assert response.transaction == b'transaction_blob' - - -@pytest.mark.asyncio -async def test_lookup_async_from_dict(): - await test_lookup_async(request_type=dict) - -def test_lookup_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.LookupRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup), - '__call__') as call: - call.return_value = datastore.LookupResponse() - client.lookup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.LookupRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.lookup),
- '__call__') as call:
- call.return_value = datastore.LookupResponse()
- client.lookup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw['metadata']
-
-
-def test_lookup_flattened():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.lookup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datastore.LookupResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.lookup(
- project_id='project_id_value',
- read_options=datastore.ReadOptions(read_consistency=datastore.ReadOptions.ReadConsistency.STRONG),
- keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].read_options
- mock_val = datastore.ReadOptions(read_consistency=datastore.ReadOptions.ReadConsistency.STRONG)
- assert arg == mock_val
- arg = args[0].keys
- mock_val = [entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))]
- assert arg == mock_val
-
-
-def test_lookup_flattened_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.lookup(
- datastore.LookupRequest(),
- project_id='project_id_value',
- read_options=datastore.ReadOptions(read_consistency=datastore.ReadOptions.ReadConsistency.STRONG),
- keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
- )
-
-@pytest.mark.asyncio
-async def test_lookup_flattened_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.lookup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.LookupResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.lookup(
- project_id='project_id_value',
- read_options=datastore.ReadOptions(read_consistency=datastore.ReadOptions.ReadConsistency.STRONG),
- keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].read_options - mock_val = datastore.ReadOptions(read_consistency=datastore.ReadOptions.ReadConsistency.STRONG) - assert arg == mock_val - arg = args[0].keys - mock_val = [entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_lookup_flattened_error_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.lookup( - datastore.LookupRequest(), - project_id='project_id_value', - read_options=datastore.ReadOptions(read_consistency=datastore.ReadOptions.ReadConsistency.STRONG), - keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))], - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.RunQueryRequest, - dict, -]) -def test_run_query(request_type, transport: str = 'grpc'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datastore.RunQueryResponse( - transaction=b'transaction_blob', - ) - response = client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datastore.RunQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.RunQueryResponse) - assert response.transaction == b'transaction_blob' - - -def test_run_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - client.run_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunQueryRequest() - - -def test_run_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datastore.RunQueryRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.run_query),
- '__call__') as call:
- client.run_query(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RunQueryRequest(
- project_id='project_id_value',
- database_id='database_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_run_query_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_query),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.RunQueryResponse(
- transaction=b'transaction_blob',
- ))
- response = await client.run_query()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RunQueryRequest()
-
-@pytest.mark.asyncio
-async def test_run_query_async(transport: str = 'grpc_asyncio', request_type=datastore.RunQueryRequest):
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_query),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.RunQueryResponse(
- transaction=b'transaction_blob',
- ))
- response = await client.run_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datastore.RunQueryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.RunQueryResponse)
- assert response.transaction == b'transaction_blob'
-
-
-@pytest.mark.asyncio
-async def test_run_query_async_from_dict():
- await test_run_query_async(request_type=dict)
-
-def test_run_query_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RunQueryRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_query),
- '__call__') as call:
- call.return_value = datastore.RunQueryResponse()
- client.run_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw['metadata']
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RunQueryRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.run_query), - '__call__') as call: - call.return_value = datastore.RunQueryResponse() - client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - datastore.RunAggregationQueryRequest, - dict, -]) -def test_run_aggregation_query(request_type, transport: str = 'grpc'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datastore.RunAggregationQueryResponse( - transaction=b'transaction_blob', - ) - response = client.run_aggregation_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datastore.RunAggregationQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.RunAggregationQueryResponse) - assert response.transaction == b'transaction_blob' - - -def test_run_aggregation_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - client.run_aggregation_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunAggregationQueryRequest() - - -def test_run_aggregation_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datastore.RunAggregationQueryRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_aggregation_query), - '__call__') as call: - client.run_aggregation_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.RunAggregationQueryRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - -@pytest.mark.asyncio -async def test_run_aggregation_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_aggregation_query),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.RunAggregationQueryResponse(
- transaction=b'transaction_blob',
- ))
- response = await client.run_aggregation_query()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RunAggregationQueryRequest()
-
-@pytest.mark.asyncio
-async def test_run_aggregation_query_async(transport: str = 'grpc_asyncio', request_type=datastore.RunAggregationQueryRequest):
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_aggregation_query),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.RunAggregationQueryResponse(
- transaction=b'transaction_blob',
- ))
- response = await client.run_aggregation_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datastore.RunAggregationQueryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.RunAggregationQueryResponse)
- assert response.transaction == b'transaction_blob'
-
-
-@pytest.mark.asyncio
-async def test_run_aggregation_query_async_from_dict():
- await test_run_aggregation_query_async(request_type=dict)
-
-def test_run_aggregation_query_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RunAggregationQueryRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_aggregation_query),
- '__call__') as call:
- call.return_value = datastore.RunAggregationQueryResponse()
- client.run_aggregation_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw['metadata']
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.RunAggregationQueryRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.run_aggregation_query),
- '__call__') as call:
- call.return_value = datastore.RunAggregationQueryResponse()
- client.run_aggregation_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - datastore.BeginTransactionRequest, - dict, -]) -def test_begin_transaction(request_type, transport: str = 'grpc'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datastore.BeginTransactionResponse( - transaction=b'transaction_blob', - ) - response = client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datastore.BeginTransactionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.BeginTransactionResponse) - assert response.transaction == b'transaction_blob' - - -def test_begin_transaction_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - client.begin_transaction() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest() - - -def test_begin_transaction_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datastore.BeginTransactionRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - client.begin_transaction(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.BeginTransactionRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - -@pytest.mark.asyncio -async def test_begin_transaction_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.BeginTransactionResponse(
- transaction=b'transaction_blob',
- ))
- response = await client.begin_transaction()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.BeginTransactionRequest()
-
-@pytest.mark.asyncio
-async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=datastore.BeginTransactionRequest):
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.BeginTransactionResponse(
- transaction=b'transaction_blob',
- ))
- response = await client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datastore.BeginTransactionRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.BeginTransactionResponse)
- assert response.transaction == b'transaction_blob'
-
-
-@pytest.mark.asyncio
-async def test_begin_transaction_async_from_dict():
- await test_begin_transaction_async(request_type=dict)
-
-def test_begin_transaction_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.BeginTransactionRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- call.return_value = datastore.BeginTransactionResponse()
- client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw['metadata']
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.BeginTransactionRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- call.return_value = datastore.BeginTransactionResponse()
- client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw['metadata']
-
-
-def test_begin_transaction_flattened():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datastore.BeginTransactionResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.begin_transaction(
- project_id='project_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
-
-
-def test_begin_transaction_flattened_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.begin_transaction(
- datastore.BeginTransactionRequest(),
- project_id='project_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_begin_transaction_flattened_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.BeginTransactionResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.begin_transaction(
- project_id='project_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_begin_transaction_flattened_error_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.begin_transaction(
- datastore.BeginTransactionRequest(),
- project_id='project_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datastore.CommitRequest,
- dict,
-])
-def test_commit(request_type, transport: str = 'grpc'):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.commit),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datastore.CommitResponse(
- index_updates=1389,
- )
- response = client.commit(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = datastore.CommitRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.CommitResponse)
- assert response.index_updates == 1389
-
-
-def test_commit_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.commit),
- '__call__') as call:
- client.commit()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.CommitRequest()
-
-
-def test_commit_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = datastore.CommitRequest(
- project_id='project_id_value',
- database_id='database_id_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.commit),
- '__call__') as call:
- client.commit(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.CommitRequest(
- project_id='project_id_value',
- database_id='database_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_commit_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.commit),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.CommitResponse(
- index_updates=1389,
- ))
- response = await client.commit()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.CommitRequest()
-
-@pytest.mark.asyncio
-async def test_commit_async(transport: str = 'grpc_asyncio', request_type=datastore.CommitRequest):
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.commit),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.CommitResponse(
- index_updates=1389,
- ))
- response = await client.commit(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datastore.CommitRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.CommitResponse) - assert response.index_updates == 1389 - - -@pytest.mark.asyncio -async def test_commit_async_from_dict(): - await test_commit_async(request_type=dict) - -def test_commit_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.CommitRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = datastore.CommitResponse() - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.CommitRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = datastore.CommitResponse() - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_commit_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datastore.CommitResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.commit( - project_id='project_id_value', - mode=datastore.CommitRequest.Mode.TRANSACTIONAL, - transaction=b'transaction_blob', - mutations=[datastore.Mutation(insert=entity.Entity(key=entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))))], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].mode - mock_val = datastore.CommitRequest.Mode.TRANSACTIONAL - assert arg == mock_val - arg = args[0].mutations - mock_val = [datastore.Mutation(insert=entity.Entity(key=entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))))] - assert arg == mock_val - assert args[0].transaction == b'transaction_blob' - - -def test_commit_flattened_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.commit(
- datastore.CommitRequest(),
- project_id='project_id_value',
- mode=datastore.CommitRequest.Mode.TRANSACTIONAL,
- transaction=b'transaction_blob',
- mutations=[datastore.Mutation(insert=entity.Entity(key=entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))))],
- )
-
-@pytest.mark.asyncio
-async def test_commit_flattened_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.commit),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.CommitResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.commit(
- project_id='project_id_value',
- mode=datastore.CommitRequest.Mode.TRANSACTIONAL,
- transaction=b'transaction_blob',
- mutations=[datastore.Mutation(insert=entity.Entity(key=entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))))],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].mode
- mock_val = datastore.CommitRequest.Mode.TRANSACTIONAL
- assert arg == mock_val
- arg = args[0].mutations
- mock_val = [datastore.Mutation(insert=entity.Entity(key=entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))))]
- assert arg == mock_val
- assert args[0].transaction == b'transaction_blob'
-
-@pytest.mark.asyncio
-async def test_commit_flattened_error_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.commit(
- datastore.CommitRequest(),
- project_id='project_id_value',
- mode=datastore.CommitRequest.Mode.TRANSACTIONAL,
- transaction=b'transaction_blob',
- mutations=[datastore.Mutation(insert=entity.Entity(key=entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))))],
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datastore.RollbackRequest,
- dict,
-])
-def test_rollback(request_type, transport: str = 'grpc'):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rollback),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datastore.RollbackResponse(
- )
- response = client.rollback(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = datastore.RollbackRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.RollbackResponse)
-
-
-def test_rollback_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rollback),
- '__call__') as call:
- client.rollback()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RollbackRequest()
-
-
-def test_rollback_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = datastore.RollbackRequest(
- project_id='project_id_value',
- database_id='database_id_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rollback),
- '__call__') as call:
- client.rollback(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RollbackRequest(
- project_id='project_id_value',
- database_id='database_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_rollback_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rollback),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.RollbackResponse(
- ))
- response = await client.rollback()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.RollbackRequest()
-
-@pytest.mark.asyncio
-async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=datastore.RollbackRequest):
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rollback),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.RollbackResponse(
- ))
- response = await client.rollback(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datastore.RollbackRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.RollbackResponse) - - -@pytest.mark.asyncio -async def test_rollback_async_from_dict(): - await test_rollback_async(request_type=dict) - -def test_rollback_routing_parameters(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.RollbackRequest(**{"project_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = datastore.RollbackResponse() - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore.RollbackRequest(**{"database_id": "sample1"}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = datastore.RollbackResponse() - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - - -def test_rollback_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datastore.RollbackResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rollback( - project_id='project_id_value', - transaction=b'transaction_blob', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].transaction - mock_val = b'transaction_blob' - assert arg == mock_val - - -def test_rollback_flattened_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - datastore.RollbackRequest(), - project_id='project_id_value', - transaction=b'transaction_blob', - ) - -@pytest.mark.asyncio -async def test_rollback_flattened_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.RollbackResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.rollback(
- project_id='project_id_value',
- transaction=b'transaction_blob',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].transaction
- mock_val = b'transaction_blob'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_rollback_flattened_error_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.rollback(
- datastore.RollbackRequest(),
- project_id='project_id_value',
- transaction=b'transaction_blob',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datastore.AllocateIdsRequest,
- dict,
-])
-def test_allocate_ids(request_type, transport: str = 'grpc'):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datastore.AllocateIdsResponse(
- )
- response = client.allocate_ids(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = datastore.AllocateIdsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.AllocateIdsResponse)
-
-
-def test_allocate_ids_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- client.allocate_ids()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.AllocateIdsRequest()
-
-
-def test_allocate_ids_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = datastore.AllocateIdsRequest(
- project_id='project_id_value',
- database_id='database_id_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- client.allocate_ids(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.AllocateIdsRequest(
- project_id='project_id_value',
- database_id='database_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_allocate_ids_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.AllocateIdsResponse(
- ))
- response = await client.allocate_ids()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datastore.AllocateIdsRequest()
-
-@pytest.mark.asyncio
-async def test_allocate_ids_async(transport: str = 'grpc_asyncio', request_type=datastore.AllocateIdsRequest):
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.AllocateIdsResponse(
- ))
- response = await client.allocate_ids(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datastore.AllocateIdsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datastore.AllocateIdsResponse)
-
-
-@pytest.mark.asyncio
-async def test_allocate_ids_async_from_dict():
- await test_allocate_ids_async(request_type=dict)
-
-def test_allocate_ids_routing_parameters():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.AllocateIdsRequest(**{"project_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- call.return_value = datastore.AllocateIdsResponse()
- client.allocate_ids(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw['metadata']
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datastore.AllocateIdsRequest(**{"database_id": "sample1"})
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- call.return_value = datastore.AllocateIdsResponse()
- client.allocate_ids(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- _, _, kw = call.mock_calls[0]
- # This test doesn't assert anything useful.
- assert kw['metadata']
-
-
-def test_allocate_ids_flattened():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datastore.AllocateIdsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.allocate_ids(
- project_id='project_id_value',
- keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].keys
- mock_val = [entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))]
- assert arg == mock_val
-
-
-def test_allocate_ids_flattened_error():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.allocate_ids(
- datastore.AllocateIdsRequest(),
- project_id='project_id_value',
- keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
- )
-
-@pytest.mark.asyncio
-async def test_allocate_ids_flattened_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.allocate_ids),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.AllocateIdsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.allocate_ids(
- project_id='project_id_value',
- keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].keys
- mock_val = [entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))]
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_allocate_ids_flattened_error_async():
- client = DatastoreAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.allocate_ids( - datastore.AllocateIdsRequest(), - project_id='project_id_value', - keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))], - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.ReserveIdsRequest, - dict, -]) -def test_reserve_ids(request_type, transport: str = 'grpc'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reserve_ids), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datastore.ReserveIdsResponse( - ) - response = client.reserve_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datastore.ReserveIdsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.ReserveIdsResponse) - - -def test_reserve_ids_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reserve_ids), - '__call__') as call: - client.reserve_ids() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest() - - -def test_reserve_ids_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datastore.ReserveIdsRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reserve_ids), - '__call__') as call: - client.reserve_ids(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore.ReserveIdsRequest( - project_id='project_id_value', - database_id='database_id_value', - ) - -@pytest.mark.asyncio -async def test_reserve_ids_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reserve_ids), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.ReserveIdsResponse())
-        response = await client.reserve_ids()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore.ReserveIdsRequest()
-
-@pytest.mark.asyncio
-async def test_reserve_ids_async(transport: str = 'grpc_asyncio', request_type=datastore.ReserveIdsRequest):
-    client = DatastoreAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.reserve_ids),
-            '__call__') as call:
-        # Designate an appropriate, awaitable return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.ReserveIdsResponse())
-        response = await client.reserve_ids(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore.ReserveIdsRequest()
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, datastore.ReserveIdsResponse)
-
-
-@pytest.mark.asyncio
-async def test_reserve_ids_async_from_dict():
-    await test_reserve_ids_async(request_type=dict)
-
-def test_reserve_ids_routing_parameters():
-    client = DatastoreClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore.ReserveIdsRequest(**{"project_id": "sample1"})
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.reserve_ids),
-            '__call__') as call:
-        call.return_value = datastore.ReserveIdsResponse()
-        client.reserve_ids(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        _, _, kw = call.mock_calls[0]
-        # The routing header must be present in the call's metadata.
-        assert kw['metadata']
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore.ReserveIdsRequest(**{"database_id": "sample1"})
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.reserve_ids),
-            '__call__') as call:
-        call.return_value = datastore.ReserveIdsResponse()
-        client.reserve_ids(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        _, _, kw = call.mock_calls[0]
-        # The routing header must be present in the call's metadata.
-        assert kw['metadata']
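The routing-parameters tests above only assert that the call metadata is non-empty. Under the hood, URI-bound fields such as `project_id` and `database_id` are serialized into a single `x-goog-request-params` metadata entry. The helper below is a rough, illustrative sketch of that serialization, not the generated client's actual code:

    from urllib.parse import quote

    def routing_metadata(**fields):
        # Only fields that actually carry a value participate in routing,
        # matching the "set these to a non-empty value" comments above.
        params = '&'.join(
            f'{name}={quote(str(value))}'
            for name, value in fields.items() if value
        )
        return [('x-goog-request-params', params)] if params else []

    assert routing_metadata(project_id='sample1', database_id='') == [
        ('x-goog-request-params', 'project_id=sample1'),
    ]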
-
-
-def test_reserve_ids_flattened():
-    client = DatastoreClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.reserve_ids),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datastore.ReserveIdsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.reserve_ids(
-            project_id='project_id_value',
-            keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].project_id
-        mock_val = 'project_id_value'
-        assert arg == mock_val
-        arg = args[0].keys
-        mock_val = [entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))]
-        assert arg == mock_val
-
-
-def test_reserve_ids_flattened_error():
-    client = DatastoreClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.reserve_ids(
-            datastore.ReserveIdsRequest(),
-            project_id='project_id_value',
-            keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
-        )
-
-@pytest.mark.asyncio
-async def test_reserve_ids_flattened_async():
-    client = DatastoreAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.reserve_ids),
-            '__call__') as call:
-        # Designate an appropriate return value for the call; the async
-        # client awaits the stub, so the value must be awaitable.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore.ReserveIdsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.reserve_ids(
-            project_id='project_id_value',
-            keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].project_id
-        mock_val = 'project_id_value'
-        assert arg == mock_val
-        arg = args[0].keys
-        mock_val = [entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))]
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_reserve_ids_flattened_error_async():
-    client = DatastoreAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.reserve_ids(
-            datastore.ReserveIdsRequest(),
-            project_id='project_id_value',
-            keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore.LookupRequest,
-    dict,
-])
-def test_lookup_rest(request_type):
-    client = DatastoreClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
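`test_lookup_rest` above, like the gRPC tests before it, is parametrized over both the request message class and `dict`, because GAPIC methods accept either form and coerce a dict into the message. A toy model of that contract, where the `Request` dataclass and `normalize` helper are hypothetical stand-ins for the real proto message and client logic:

    import dataclasses
    import pytest

    @dataclasses.dataclass
    class Request:
        # Hypothetical stand-in for a proto request message such as
        # datastore.LookupRequest.
        project_id: str = ''

    def normalize(request):
        # A GAPIC-style method accepts either the message type or a plain
        # dict and normalizes to the message before issuing the call.
        return Request(**request) if isinstance(request, dict) else request

    @pytest.mark.parametrize("request_type", [Request, dict])
    def test_accepts_message_or_dict(request_type):
        # The same construction expression works for both parametrizations.
        request = request_type(**{'project_id': 'sample1'})
        assert normalize(request) == Request(project_id='sample1')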
- return_value = datastore.LookupResponse( - transaction=b'transaction_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.LookupResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.lookup(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.LookupResponse) - assert response.transaction == b'transaction_blob' - - -def test_lookup_rest_required_fields(request_type=datastore.LookupRequest): - transport_class = transports.DatastoreRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).lookup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).lookup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datastore.LookupResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datastore.LookupResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.lookup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_lookup_rest_unset_required_fields(): - transport = transports.DatastoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.lookup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "keys", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreRestInterceptor, "post_lookup") as post, \ - mock.patch.object(transports.DatastoreRestInterceptor, "pre_lookup") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.LookupRequest.pb(datastore.LookupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.LookupResponse.to_json(datastore.LookupResponse()) - - request = datastore.LookupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.LookupResponse() - - client.lookup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_lookup_rest_bad_request(transport: str = 'rest', request_type=datastore.LookupRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lookup(request) - - -def test_lookup_rest_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
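Several tests above stub out `path_template.transcode`; all the stub has to supply is a dict with `uri`, `method`, `query_params`, and optionally `body`. A toy transcoder for the simple single-variable case, shown only to make the mocked contract concrete (real transcoding also handles wildcard patterns and multiple bindings):

    def transcode(template, method, request):
        # Substitute URI-bound fields; everything else stays behind for
        # query params or the request body.
        uri = template.format(**{
            name: request[name]
            for name in ('project_id',) if name in request
        })
        remainder = {k: v for k, v in request.items()
                     if '{' + k + '}' not in template}
        return {'uri': uri, 'method': method,
                'query_params': remainder, 'body': remainder}

    result = transcode('v1/projects/{project_id}:lookup', 'post',
                       {'project_id': 'sample1'})
    assert result['uri'] == 'v1/projects/sample1:lookup'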
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.LookupResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - read_options=datastore.ReadOptions(read_consistency=datastore.ReadOptions.ReadConsistency.STRONG), - keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.LookupResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.lookup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}:lookup" % client.transport._host, args[1]) - - -def test_lookup_rest_flattened_error(transport: str = 'rest'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.lookup( - datastore.LookupRequest(), - project_id='project_id_value', - read_options=datastore.ReadOptions(read_consistency=datastore.ReadOptions.ReadConsistency.STRONG), - keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))], - ) - - -def test_lookup_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.RunQueryRequest, - dict, -]) -def test_run_query_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.RunQueryResponse( - transaction=b'transaction_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.run_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datastore.RunQueryResponse) - assert response.transaction == b'transaction_blob' - - -def test_run_query_rest_required_fields(request_type=datastore.RunQueryRequest): - transport_class = transports.DatastoreRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datastore.RunQueryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
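The required-fields tests depend on `json_format.MessageToJson` omitting proto3 fields that still hold their default value, which is why `request_init["project_id"] = ""` vanishes from `jsonified_request` and has to be restored under its camelCase JSON name. A small standard-library stand-in for that drop-and-restore step (the `to_camel` and `drop_defaults` helpers are illustrative only):

    def to_camel(name):
        # 'project_id' -> 'projectId', mirroring proto3 JSON field names.
        head, *rest = name.split('_')
        return head + ''.join(part.title() for part in rest)

    def drop_defaults(request):
        # proto3 JSON omits string fields still at their default (''),
        # which is why the tests start from request_init["project_id"] = "".
        return {to_camel(k): v for k, v in request.items() if v != ''}

    jsonified = drop_defaults({'project_id': ''})
    assert 'projectId' not in jsonified          # default was dropped
    jsonified['projectId'] = 'project_id_value'  # the tests then restore it
    assert jsonified['projectId'] == 'project_id_value'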
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datastore.RunQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.run_query(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_run_query_rest_unset_required_fields(): - transport = transports.DatastoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.run_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_query_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreRestInterceptor, "post_run_query") as post, \ - mock.patch.object(transports.DatastoreRestInterceptor, "pre_run_query") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.RunQueryRequest.pb(datastore.RunQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.RunQueryResponse.to_json(datastore.RunQueryResponse()) - - request = datastore.RunQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.RunQueryResponse() - - client.run_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_run_query_rest_bad_request(transport: str = 'rest', request_type=datastore.RunQueryRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_query(request) - - -def test_run_query_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.RunAggregationQueryRequest, - dict, -]) -def test_run_aggregation_query_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.RunAggregationQueryResponse( - transaction=b'transaction_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.run_aggregation_query(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.RunAggregationQueryResponse) - assert response.transaction == b'transaction_blob' - - -def test_run_aggregation_query_rest_required_fields(request_type=datastore.RunAggregationQueryRequest): - transport_class = transports.DatastoreRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_aggregation_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_aggregation_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datastore.RunAggregationQueryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datastore.RunAggregationQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.run_aggregation_query(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_run_aggregation_query_rest_unset_required_fields(): - transport = transports.DatastoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.run_aggregation_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_aggregation_query_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreRestInterceptor, "post_run_aggregation_query") as post, \ - mock.patch.object(transports.DatastoreRestInterceptor, "pre_run_aggregation_query") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.RunAggregationQueryRequest.pb(datastore.RunAggregationQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.RunAggregationQueryResponse.to_json(datastore.RunAggregationQueryResponse()) - - request = datastore.RunAggregationQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.RunAggregationQueryResponse() - - client.run_aggregation_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_run_aggregation_query_rest_bad_request(transport: str = 'rest', request_type=datastore.RunAggregationQueryRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
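The `*_rest_bad_request` test that follows only requires the transport to translate an HTTP 400 into a typed `BadRequest` exception rather than returning the response object. A toy version of that mapping, where the exception class and helper are stand-ins rather than google.api_core's actual machinery:

    import pytest

    class BadRequest(Exception):
        # Stand-in for google.api_core's BadRequest exception type.
        pass

    _STATUS_TO_EXC = {400: BadRequest}

    def raise_for_status(status_code):
        # The REST transport raises a typed exception for error responses
        # instead of handing the raw Response back to the caller.
        exc = _STATUS_TO_EXC.get(status_code)
        if exc is not None:
            raise exc(f'HTTP {status_code}')

    def test_bad_request_maps_to_typed_exception():
        with pytest.raises(BadRequest):
            raise_for_status(400)
        raise_for_status(200)  # success statuses pass through silently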
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_aggregation_query(request) - - -def test_run_aggregation_query_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.BeginTransactionRequest, - dict, -]) -def test_begin_transaction_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.BeginTransactionResponse( - transaction=b'transaction_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.begin_transaction(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datastore.BeginTransactionResponse) - assert response.transaction == b'transaction_blob' - - -def test_begin_transaction_rest_required_fields(request_type=datastore.BeginTransactionRequest): - transport_class = transports.DatastoreRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datastore.BeginTransactionResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datastore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.begin_transaction(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_begin_transaction_rest_unset_required_fields(): - transport = transports.DatastoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.begin_transaction._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_begin_transaction_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreRestInterceptor, "post_begin_transaction") as post, \ - mock.patch.object(transports.DatastoreRestInterceptor, "pre_begin_transaction") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.BeginTransactionRequest.pb(datastore.BeginTransactionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.BeginTransactionResponse.to_json(datastore.BeginTransactionResponse()) - - request = datastore.BeginTransactionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.BeginTransactionResponse() - - client.begin_transaction(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_begin_transaction_rest_bad_request(transport: str = 'rest', request_type=datastore.BeginTransactionRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
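The interceptor tests above assert only the ordering contract: the `pre_*` hook fires before the HTTP call with `(request, metadata)`, and the `post_*` hook fires after it with the response. A minimal model of that flow, with hypothetical names rather than the generated `DatastoreRestInterceptor` API:

    from unittest import mock

    class Interceptor:
        # pre receives (request, metadata) and may rewrite both;
        # post receives the response and may replace it.
        def pre(self, request, metadata):
            return request, metadata

        def post(self, response):
            return response

    def invoke(transport_call, request, metadata, interceptor=None):
        if interceptor:
            request, metadata = interceptor.pre(request, metadata)
        response = transport_call(request, metadata)
        return interceptor.post(response) if interceptor else response

    interceptor = mock.Mock(wraps=Interceptor())
    invoke(lambda req, md: 'response', 'request', [('key', 'val')], interceptor)
    interceptor.pre.assert_called_once()
    interceptor.post.assert_called_once()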
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.begin_transaction(request) - - -def test_begin_transaction_rest_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.BeginTransactionResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.BeginTransactionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.begin_transaction(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}:beginTransaction" % client.transport._host, args[1]) - - -def test_begin_transaction_rest_flattened_error(transport: str = 'rest'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.begin_transaction( - datastore.BeginTransactionRequest(), - project_id='project_id_value', - ) - - -def test_begin_transaction_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.CommitRequest, - dict, -]) -def test_commit_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.CommitResponse( - index_updates=1389, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.commit(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datastore.CommitResponse) - assert response.index_updates == 1389 - - -def test_commit_rest_required_fields(request_type=datastore.CommitRequest): - transport_class = transports.DatastoreRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datastore.CommitResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datastore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.commit(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_commit_rest_unset_required_fields(): - transport = transports.DatastoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.commit._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_commit_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreRestInterceptor, "post_commit") as post, \ - mock.patch.object(transports.DatastoreRestInterceptor, "pre_commit") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.CommitRequest.pb(datastore.CommitRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.CommitResponse.to_json(datastore.CommitResponse()) - - request = datastore.CommitRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.CommitResponse() - - client.commit(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_commit_rest_bad_request(transport: str = 'rest', request_type=datastore.CommitRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.commit(request) - - -def test_commit_rest_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.CommitResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - mode=datastore.CommitRequest.Mode.TRANSACTIONAL, - mutations=[datastore.Mutation(insert=entity.Entity(key=entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))))], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.commit(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}:commit" % client.transport._host, args[1]) - - -def test_commit_rest_flattened_error(transport: str = 'rest'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.commit( - datastore.CommitRequest(), - project_id='project_id_value', - mode=datastore.CommitRequest.Mode.TRANSACTIONAL, - transaction=b'transaction_blob', - mutations=[datastore.Mutation(insert=entity.Entity(key=entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))))], - ) - - -def test_commit_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.RollbackRequest, - dict, -]) -def test_rollback_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.RollbackResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.RollbackResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.rollback(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datastore.RollbackResponse) - - -def test_rollback_rest_required_fields(request_type=datastore.RollbackRequest): - transport_class = transports.DatastoreRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["transaction"] = b'' - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["transaction"] = b'transaction_blob' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "transaction" in jsonified_request - assert jsonified_request["transaction"] == b'transaction_blob' - - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datastore.RollbackResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
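Rollback is the one method here whose required fields include raw bytes (`transaction`), and proto3 JSON represents `bytes` fields as base64 text. A quick standard-library illustration of the encoding a faked JSON payload would need:

    import base64
    import json

    transaction = b'transaction_blob'

    # proto3 JSON carries bytes fields as base64 strings, so a faked JSON
    # response containing a transaction must encode it the same way.
    payload = json.dumps(
        {'transaction': base64.b64encode(transaction).decode('ascii')})

    decoded = base64.b64decode(json.loads(payload)['transaction'])
    assert decoded == b'transaction_blob'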
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datastore.RollbackResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.rollback(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rollback_rest_unset_required_fields(): - transport = transports.DatastoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rollback._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "transaction", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreRestInterceptor, "post_rollback") as post, \ - mock.patch.object(transports.DatastoreRestInterceptor, "pre_rollback") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.RollbackRequest.pb(datastore.RollbackRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.RollbackResponse.to_json(datastore.RollbackResponse()) - - request = datastore.RollbackRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.RollbackResponse() - - client.rollback(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_rollback_rest_bad_request(transport: str = 'rest', request_type=datastore.RollbackRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.rollback(request) - - -def test_rollback_rest_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.RollbackResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - transaction=b'transaction_blob', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.RollbackResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.rollback(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}:rollback" % client.transport._host, args[1]) - - -def test_rollback_rest_flattened_error(transport: str = 'rest'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - datastore.RollbackRequest(), - project_id='project_id_value', - transaction=b'transaction_blob', - ) - - -def test_rollback_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.AllocateIdsRequest, - dict, -]) -def test_allocate_ids_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.AllocateIdsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.AllocateIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.allocate_ids(request) - - # Establish that the response is the type that we expect. 
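Every `*_flattened_error` test pins down the same client contract: callers pass either a complete request object or individual flattened fields, never both. A compact model of that guard (a toy function, not the generated client code):

    import pytest

    def allocate_ids(request=None, *, project_id=None, keys=None):
        # Mixing a request object with flattened fields is ambiguous,
        # so the client refuses the call up front.
        has_flattened = any(v is not None for v in (project_id, keys))
        if request is not None and has_flattened:
            raise ValueError('If the `request` argument is set, then none '
                             'of the individual field arguments should be set.')
        return request or {'project_id': project_id, 'keys': keys}

    def test_request_and_flattened_fields_conflict():
        with pytest.raises(ValueError):
            allocate_ids({'project_id': 'p'}, project_id='p')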
- assert isinstance(response, datastore.AllocateIdsResponse) - - -def test_allocate_ids_rest_required_fields(request_type=datastore.AllocateIdsRequest): - transport_class = transports.DatastoreRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).allocate_ids._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).allocate_ids._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datastore.AllocateIdsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datastore.AllocateIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.allocate_ids(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_allocate_ids_rest_unset_required_fields(): - transport = transports.DatastoreRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.allocate_ids._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "keys", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_allocate_ids_rest_interceptors(null_interceptor): - transport = transports.DatastoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(), - ) - client = DatastoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreRestInterceptor, "post_allocate_ids") as post, \ - mock.patch.object(transports.DatastoreRestInterceptor, "pre_allocate_ids") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore.AllocateIdsRequest.pb(datastore.AllocateIdsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore.AllocateIdsResponse.to_json(datastore.AllocateIdsResponse()) - - request = datastore.AllocateIdsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore.AllocateIdsResponse() - - client.allocate_ids(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_allocate_ids_rest_bad_request(transport: str = 'rest', request_type=datastore.AllocateIdsRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.allocate_ids(request) - - -def test_allocate_ids_rest_flattened(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.AllocateIdsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.AllocateIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.allocate_ids(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}:allocateIds" % client.transport._host, args[1]) - - -def test_allocate_ids_rest_flattened_error(transport: str = 'rest'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.allocate_ids( - datastore.AllocateIdsRequest(), - project_id='project_id_value', - keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))], - ) - - -def test_allocate_ids_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - datastore.ReserveIdsRequest, - dict, -]) -def test_reserve_ids_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.ReserveIdsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.ReserveIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.reserve_ids(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datastore.ReserveIdsResponse) - - -def test_reserve_ids_rest_required_fields(request_type=datastore.ReserveIdsRequest): - transport_class = transports.DatastoreRestTransport - - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reserve_ids._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reserve_ids._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datastore.ReserveIdsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = datastore.ReserveIdsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.reserve_ids(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_reserve_ids_rest_unset_required_fields():
- transport = transports.DatastoreRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.reserve_ids._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("projectId", "keys", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_reserve_ids_rest_interceptors(null_interceptor):
- transport = transports.DatastoreRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatastoreRestInterceptor(),
- )
- client = DatastoreClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.DatastoreRestInterceptor, "post_reserve_ids") as post, \
- mock.patch.object(transports.DatastoreRestInterceptor, "pre_reserve_ids") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = datastore.ReserveIdsRequest.pb(datastore.ReserveIdsRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = datastore.ReserveIdsResponse.to_json(datastore.ReserveIdsResponse())
-
- request = datastore.ReserveIdsRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datastore.ReserveIdsResponse()
-
- client.reserve_ids(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_reserve_ids_rest_bad_request(transport: str = 'rest', request_type=datastore.ReserveIdsRequest):
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'project_id': 'sample1'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.reserve_ids(request)
-
-
-def test_reserve_ids_rest_flattened():
- client = DatastoreClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore.ReserveIdsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore.ReserveIdsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.reserve_ids(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}:reserveIds" % client.transport._host, args[1]) - - -def test_reserve_ids_rest_flattened_error(transport: str = 'rest'): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.reserve_ids( - datastore.ReserveIdsRequest(), - project_id='project_id_value', - keys=[entity.Key(partition_id=entity.PartitionId(project_id='project_id_value'))], - ) - - -def test_reserve_ids_rest_error(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatastoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DatastoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatastoreClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DatastoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.DatastoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatastoreClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DatastoreClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DatastoreGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DatastoreGrpcTransport, - transports.DatastoreGrpcAsyncIOTransport, - transports.DatastoreRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = DatastoreClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DatastoreGrpcTransport, - ) - -def test_datastore_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DatastoreTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_datastore_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DatastoreTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'lookup', - 'run_query', - 'run_aggregation_query', - 'begin_transaction', - 'commit', - 'rollback', - 'allocate_ids', - 'reserve_ids', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_datastore_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatastoreTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - quota_project_id="octopus", - ) - - -def test_datastore_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datastore_v1.services.datastore.transports.DatastoreTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatastoreTransport() - adc.assert_called_once() - - -def test_datastore_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DatastoreClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreGrpcTransport, - transports.DatastoreGrpcAsyncIOTransport, - ], -) -def test_datastore_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/datastore',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreGrpcTransport, - transports.DatastoreGrpcAsyncIOTransport, - transports.DatastoreRestTransport, - ], -) -def test_datastore_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DatastoreGrpcTransport, grpc_helpers), - (transports.DatastoreGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_datastore_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datastore.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - scopes=["1", "2"], - default_host="datastore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport]) -def test_datastore_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_datastore_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DatastoreRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_datastore_host_no_port(transport_name): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datastore.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datastore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://datastore.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_datastore_host_with_port(transport_name): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datastore.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datastore.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://datastore.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_datastore_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DatastoreClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DatastoreClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.lookup._session - session2 = client2.transport.lookup._session - assert session1 != session2 - session1 = client1.transport.run_query._session - session2 = client2.transport.run_query._session - assert session1 != session2 - session1 = client1.transport.run_aggregation_query._session - session2 = client2.transport.run_aggregation_query._session - assert session1 != session2 - session1 = client1.transport.begin_transaction._session - session2 = client2.transport.begin_transaction._session - assert session1 != session2 - session1 = client1.transport.commit._session - session2 = client2.transport.commit._session - assert session1 != session2 - session1 = client1.transport.rollback._session - session2 = client2.transport.rollback._session - assert session1 != session2 - session1 = client1.transport.allocate_ids._session - session2 = client2.transport.allocate_ids._session - assert session1 != session2 - session1 = client1.transport.reserve_ids._session - session2 = client2.transport.reserve_ids._session - assert session1 != session2 -def test_datastore_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.DatastoreGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_datastore_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.DatastoreGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport])
-def test_datastore_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport]) -def test_datastore_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DatastoreClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DatastoreClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = DatastoreClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DatastoreClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DatastoreClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DatastoreClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = DatastoreClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DatastoreClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatastoreClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DatastoreClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DatastoreClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DatastoreTransport, '_prep_wrapped_messages') as prep: - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DatastoreTransport, '_prep_wrapped_messages') as prep: - transport_class = DatastoreClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/operations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/operations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/operations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/operations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/operations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/operations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DatastoreClient, transports.DatastoreGrpcTransport), - (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/datastore_admin/v1/.coveragerc b/owl-bot-staging/datastore_admin/v1/.coveragerc deleted file mode 100644 index 430c8156..00000000 --- a/owl-bot-staging/datastore_admin/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/datastore_admin/__init__.py - google/cloud/datastore_admin/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/datastore_admin/v1/.flake8 b/owl-bot-staging/datastore_admin/v1/.flake8 deleted file mode 100644 index 29227d4c..00000000 --- a/owl-bot-staging/datastore_admin/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/datastore_admin/v1/MANIFEST.in b/owl-bot-staging/datastore_admin/v1/MANIFEST.in deleted file mode 100644 index 97b38353..00000000 --- a/owl-bot-staging/datastore_admin/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/datastore_admin *.py -recursive-include google/cloud/datastore_admin_v1 *.py diff --git a/owl-bot-staging/datastore_admin/v1/README.rst b/owl-bot-staging/datastore_admin/v1/README.rst deleted file mode 100644 index 8cdd7ec6..00000000 --- a/owl-bot-staging/datastore_admin/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Datastore Admin API -================================================== - -Quick Start ------------- - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Datastore Admin API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/datastore_admin/v1/docs/_static/custom.css b/owl-bot-staging/datastore_admin/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0..00000000 --- a/owl-bot-staging/datastore_admin/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/datastore_admin/v1/docs/conf.py b/owl-bot-staging/datastore_admin/v1/docs/conf.py deleted file mode 100644 index 8985f03b..00000000 --- a/owl-bot-staging/datastore_admin/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-datastore documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-datastore" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files.
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter.
-# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-datastore-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warnings, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-datastore.tex", - u"google-cloud-datastore Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-datastore", - u"Google Cloud Datastore Admin Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links.
-# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-datastore", - u"google-cloud-datastore Documentation", - author, - "google-cloud-datastore", - "GAPIC library for Google Cloud Datastore Admin API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/datastore_admin.rst b/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/datastore_admin.rst deleted file mode 100644 index 7c72691a..00000000 --- a/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/datastore_admin.rst +++ /dev/null @@ -1,10 +0,0 @@ -DatastoreAdmin --------------------------------- - -.. automodule:: google.cloud.datastore_admin_v1.services.datastore_admin - :members: - :inherited-members: - -.. automodule:: google.cloud.datastore_admin_v1.services.datastore_admin.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/services_.rst b/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/services_.rst deleted file mode 100644 index 2538574a..00000000 --- a/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Datastore Admin v1 API -================================================ -.. toctree:: - :maxdepth: 2 - - datastore_admin diff --git a/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/types_.rst b/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/types_.rst deleted file mode 100644 index a5ff1278..00000000 --- a/owl-bot-staging/datastore_admin/v1/docs/datastore_admin_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Datastore Admin v1 API -============================================= - -.. 
automodule:: google.cloud.datastore_admin_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/datastore_admin/v1/docs/index.rst b/owl-bot-staging/datastore_admin/v1/docs/index.rst deleted file mode 100644 index 826cf640..00000000 --- a/owl-bot-staging/datastore_admin/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - datastore_admin_v1/services - datastore_admin_v1/types diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/__init__.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/__init__.py deleted file mode 100644 index c771b164..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/__init__.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.datastore_admin import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.datastore_admin_v1.services.datastore_admin.client import DatastoreAdminClient -from google.cloud.datastore_admin_v1.services.datastore_admin.async_client import DatastoreAdminAsyncClient - -from google.cloud.datastore_admin_v1.types.datastore_admin import CommonMetadata -from google.cloud.datastore_admin_v1.types.datastore_admin import CreateIndexRequest -from google.cloud.datastore_admin_v1.types.datastore_admin import DatastoreFirestoreMigrationMetadata -from google.cloud.datastore_admin_v1.types.datastore_admin import DeleteIndexRequest -from google.cloud.datastore_admin_v1.types.datastore_admin import EntityFilter -from google.cloud.datastore_admin_v1.types.datastore_admin import ExportEntitiesMetadata -from google.cloud.datastore_admin_v1.types.datastore_admin import ExportEntitiesRequest -from google.cloud.datastore_admin_v1.types.datastore_admin import ExportEntitiesResponse -from google.cloud.datastore_admin_v1.types.datastore_admin import GetIndexRequest -from google.cloud.datastore_admin_v1.types.datastore_admin import ImportEntitiesMetadata -from google.cloud.datastore_admin_v1.types.datastore_admin import ImportEntitiesRequest -from google.cloud.datastore_admin_v1.types.datastore_admin import IndexOperationMetadata -from google.cloud.datastore_admin_v1.types.datastore_admin import ListIndexesRequest -from google.cloud.datastore_admin_v1.types.datastore_admin import ListIndexesResponse -from google.cloud.datastore_admin_v1.types.datastore_admin import Progress -from google.cloud.datastore_admin_v1.types.datastore_admin import OperationType -from google.cloud.datastore_admin_v1.types.index import Index -from google.cloud.datastore_admin_v1.types.migration import MigrationProgressEvent -from google.cloud.datastore_admin_v1.types.migration import MigrationStateEvent -from google.cloud.datastore_admin_v1.types.migration import MigrationState -from google.cloud.datastore_admin_v1.types.migration import MigrationStep - -__all__ = ('DatastoreAdminClient', - 
'DatastoreAdminAsyncClient', - 'CommonMetadata', - 'CreateIndexRequest', - 'DatastoreFirestoreMigrationMetadata', - 'DeleteIndexRequest', - 'EntityFilter', - 'ExportEntitiesMetadata', - 'ExportEntitiesRequest', - 'ExportEntitiesResponse', - 'GetIndexRequest', - 'ImportEntitiesMetadata', - 'ImportEntitiesRequest', - 'IndexOperationMetadata', - 'ListIndexesRequest', - 'ListIndexesResponse', - 'Progress', - 'OperationType', - 'Index', - 'MigrationProgressEvent', - 'MigrationStateEvent', - 'MigrationState', - 'MigrationStep', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/gapic_version.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/gapic_version.py deleted file mode 100644 index 558c8aab..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/py.typed b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/py.typed deleted file mode 100644 index e82a9319..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-datastore package uses inline types. diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/__init__.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/__init__.py deleted file mode 100644 index 21f077de..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/__init__.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.datastore_admin_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.datastore_admin import DatastoreAdminClient -from .services.datastore_admin import DatastoreAdminAsyncClient - -from .types.datastore_admin import CommonMetadata -from .types.datastore_admin import CreateIndexRequest -from .types.datastore_admin import DatastoreFirestoreMigrationMetadata -from .types.datastore_admin import DeleteIndexRequest -from .types.datastore_admin import EntityFilter -from .types.datastore_admin import ExportEntitiesMetadata -from .types.datastore_admin import ExportEntitiesRequest -from .types.datastore_admin import ExportEntitiesResponse -from .types.datastore_admin import GetIndexRequest -from .types.datastore_admin import ImportEntitiesMetadata -from .types.datastore_admin import ImportEntitiesRequest -from .types.datastore_admin import IndexOperationMetadata -from .types.datastore_admin import ListIndexesRequest -from .types.datastore_admin import ListIndexesResponse -from .types.datastore_admin import Progress -from .types.datastore_admin import OperationType -from .types.index import Index -from .types.migration import MigrationProgressEvent -from .types.migration import MigrationStateEvent -from .types.migration import MigrationState -from .types.migration import MigrationStep - -__all__ = ( - 'DatastoreAdminAsyncClient', -'CommonMetadata', -'CreateIndexRequest', -'DatastoreAdminClient', -'DatastoreFirestoreMigrationMetadata', -'DeleteIndexRequest', -'EntityFilter', -'ExportEntitiesMetadata', -'ExportEntitiesRequest', -'ExportEntitiesResponse', -'GetIndexRequest', -'ImportEntitiesMetadata', -'ImportEntitiesRequest', -'Index', -'IndexOperationMetadata', -'ListIndexesRequest', -'ListIndexesResponse', -'MigrationProgressEvent', -'MigrationState', -'MigrationStateEvent', -'MigrationStep', -'OperationType', -'Progress', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/gapic_metadata.json b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/gapic_metadata.json deleted file mode 100644 index 450debcd..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/gapic_metadata.json +++ /dev/null @@ -1,118 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.datastore_admin_v1", - "protoPackage": "google.datastore.admin.v1", - "schema": "1.0", - "services": { - "DatastoreAdmin": { - "clients": { - "grpc": { - "libraryClient": "DatastoreAdminClient", - "rpcs": { - "CreateIndex": { - "methods": [ - "create_index" - ] - }, - "DeleteIndex": { - "methods": [ - "delete_index" - ] - }, - "ExportEntities": { - "methods": [ - "export_entities" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "ImportEntities": { - "methods": [ - "import_entities" - ] - }, - "ListIndexes": { - "methods": [ - "list_indexes" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DatastoreAdminAsyncClient", - "rpcs": { - "CreateIndex": { - "methods": [ - "create_index" - ] - }, - "DeleteIndex": { - "methods": [ - "delete_index" - ] - }, - "ExportEntities": { - "methods": [ - "export_entities" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "ImportEntities": { - "methods": [ - "import_entities" - ] - }, - "ListIndexes": { - "methods": [ - "list_indexes" - ] - } - } - }, - "rest": { - "libraryClient": "DatastoreAdminClient", - "rpcs": { - 
"CreateIndex": { - "methods": [ - "create_index" - ] - }, - "DeleteIndex": { - "methods": [ - "delete_index" - ] - }, - "ExportEntities": { - "methods": [ - "export_entities" - ] - }, - "GetIndex": { - "methods": [ - "get_index" - ] - }, - "ImportEntities": { - "methods": [ - "import_entities" - ] - }, - "ListIndexes": { - "methods": [ - "list_indexes" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/gapic_version.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/gapic_version.py deleted file mode 100644 index 558c8aab..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/py.typed b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/py.typed deleted file mode 100644 index e82a9319..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-datastore package uses inline types. diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/__init__.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/__init__.py deleted file mode 100644 index 8f6cf068..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py deleted file mode 100644 index 373ccdf1..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DatastoreAdminClient -from .async_client import DatastoreAdminAsyncClient - -__all__ = ( - 'DatastoreAdminClient', - 'DatastoreAdminAsyncClient', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py deleted file mode 100644 index 517f07b0..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ /dev/null @@ -1,1285 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.datastore_admin_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.datastore_admin_v1.services.datastore_admin import pagers -from google.cloud.datastore_admin_v1.types import datastore_admin -from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport -from .client import DatastoreAdminClient - - -class DatastoreAdminAsyncClient: - """Google Cloud Datastore Admin API - - The Datastore Admin API provides several admin services for - Cloud Datastore. - - Concepts: Project, namespace, kind, and entity as defined in the - Google Cloud Datastore API. - - Operation: An Operation represents work being performed in the - background. - - EntityFilter: Allows specifying a subset of entities in a - project. 
This is specified as a combination of kinds and - namespaces (either or both of which may be all). - - Export/Import Service: - - - The Export/Import service provides the ability to copy all or - a subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one - project and then import into another. - - Exported data can also be loaded into Google BigQuery for - analysis. - - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state - (including any errors encountered) of the export/import may be - queried via the Operation resource. - - Index Service: - - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - Operation Service: - - - The Operations collection provides a record of actions - performed for the specified project (including any operations - in progress). Operations are not created directly but through - calls on other collections or resources. - - An operation that is not yet done may be cancelled. The - request to cancel is asynchronous and the operation may - continue to run for some time after the request to cancel is - made. - - An operation that is done may be deleted so that it is no - longer listed as part of the Operation collection. - - ListOperations returns all pending operations, but not - completed operations. - - Operations are created by service DatastoreAdmin, but are - accessed via service google.longrunning.Operations. - """ - - _client: DatastoreAdminClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = DatastoreAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DatastoreAdminClient._DEFAULT_UNIVERSE - - common_billing_account_path = staticmethod(DatastoreAdminClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DatastoreAdminClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DatastoreAdminClient.common_folder_path) - parse_common_folder_path = staticmethod(DatastoreAdminClient.parse_common_folder_path) - common_organization_path = staticmethod(DatastoreAdminClient.common_organization_path) - parse_common_organization_path = staticmethod(DatastoreAdminClient.parse_common_organization_path) - common_project_path = staticmethod(DatastoreAdminClient.common_project_path) - parse_common_project_path = staticmethod(DatastoreAdminClient.parse_common_project_path) - common_location_path = staticmethod(DatastoreAdminClient.common_location_path) - parse_common_location_path = staticmethod(DatastoreAdminClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. 
- kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreAdminAsyncClient: The constructed client. - """ - return DatastoreAdminClient.from_service_account_info.__func__(DatastoreAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreAdminAsyncClient: The constructed client. - """ - return DatastoreAdminClient.from_service_account_file.__func__(DatastoreAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint, - otherwise use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DatastoreAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DatastoreAdminTransport: - """Returns the transport used by the client instance. - - Returns: - DatastoreAdminTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance.
- """ - return self._client._universe_domain - - get_transport_class = functools.partial(type(DatastoreAdminClient).get_transport_class, type(DatastoreAdminClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatastoreAdminTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the datastore admin async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DatastoreAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DatastoreAdminClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def export_entities(self, - request: Optional[Union[datastore_admin.ExportEntitiesRequest, dict]] = None, - *, - project_id: Optional[str] = None, - labels: Optional[MutableMapping[str, str]] = None, - entity_filter: Optional[datastore_admin.EntityFilter] = None, - output_url_prefix: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Exports a copy of all or a subset of entities from - Google Cloud Datastore to another storage system, such - as Google Cloud Storage. Recent updates to entities may - not be reflected in the export. 
The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - async def sample_export_entities(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ExportEntitiesRequest( - project_id="project_id_value", - output_url_prefix="output_url_prefix_value", - ) - - # Make the request - operation = client.export_entities(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - project_id (:class:`str`): - Required. Project ID against which to - make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - labels (:class:`MutableMapping[str, str]`): - Client-assigned labels. - This corresponds to the ``labels`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entity_filter (:class:`google.cloud.datastore_admin_v1.types.EntityFilter`): - Description of what data from the - project is included in the export. - - This corresponds to the ``entity_filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_url_prefix (:class:`str`): - Required. Location for the export metadata and data - files. - - The full resource URL of the external storage location. - Currently, only Google Cloud Storage is supported. So - output_url_prefix should be of the form: - ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where - ``BUCKET_NAME`` is the name of the Cloud Storage bucket - and ``NAMESPACE_PATH`` is an optional Cloud Storage - namespace path (this is not a Cloud Datastore - namespace). For more information about Cloud Storage - namespace paths, see `Object name - considerations <https://cloud.google.com/storage/docs/naming#object-considerations>`__. - - The resulting files will be nested deeper than the - specified URL prefix. The final output URL will be - provided in the - [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] - field. That value should be used for subsequent - ImportEntities operations. - - By nesting the data files deeper, the same Cloud Storage - bucket can be used in multiple ExportEntities operations - without conflict. - - This corresponds to the ``output_url_prefix`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.datastore_admin_v1.types.ExportEntitiesResponse` The response for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, labels, entity_filter, output_url_prefix]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datastore_admin.ExportEntitiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if entity_filter is not None: - request.entity_filter = entity_filter - if output_url_prefix is not None: - request.output_url_prefix = output_url_prefix - - if labels: - request.labels.update(labels) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.export_entities, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - datastore_admin.ExportEntitiesResponse, - metadata_type=datastore_admin.ExportEntitiesMetadata, - ) - - # Done; return the response. - return response - - async def import_entities(self, - request: Optional[Union[datastore_admin.ImportEntitiesRequest, dict]] = None, - *, - project_id: Optional[str] = None, - labels: Optional[MutableMapping[str, str]] = None, - input_url: Optional[str] = None, - entity_filter: Optional[datastore_admin.EntityFilter] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Imports entities into Google Cloud Datastore. - Existing entities with the same key are overwritten. The - import occurs in the background and its progress can be - monitored and managed via the Operation resource that is - created. If an ImportEntities operation is cancelled, it - is possible that a subset of the data has already been - imported to Cloud Datastore. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - async def sample_import_entities(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ImportEntitiesRequest( - project_id="project_id_value", - input_url="input_url_value", - ) - - # Make the request - operation = client.import_entities(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. - project_id (:class:`str`): - Required. Project ID against which to - make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - labels (:class:`MutableMapping[str, str]`): - Client-assigned labels. - This corresponds to the ``labels`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_url (:class:`str`): - Required. The full resource URL of the external storage - location. Currently, only Google Cloud Storage is - supported. So input_url should be of the form: - ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, - where ``BUCKET_NAME`` is the name of the Cloud Storage - bucket, ``NAMESPACE_PATH`` is an optional Cloud Storage - namespace path (this is not a Cloud Datastore - namespace), and ``OVERALL_EXPORT_METADATA_FILE`` is the - metadata file written by the ExportEntities operation. - For more information about Cloud Storage namespace - paths, see `Object name - considerations <https://cloud.google.com/storage/docs/naming#object-considerations>`__. - - For more information, see - [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. - - This corresponds to the ``input_url`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entity_filter (:class:`google.cloud.datastore_admin_v1.types.EntityFilter`): - Optionally specify which kinds/namespaces are to be - imported. If provided, the list must be a subset of the - EntityFilter used in creating the export, otherwise a - FAILED_PRECONDITION error will be returned. If no filter - is specified then all entities from the export are - imported. - - This corresponds to the ``entity_filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs.
A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, labels, input_url, entity_filter]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datastore_admin.ImportEntitiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if input_url is not None: - request.input_url = input_url - if entity_filter is not None: - request.entity_filter = entity_filter - - if labels: - request.labels.update(labels) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_entities, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=datastore_admin.ImportEntitiesMetadata, - ) - - # Done; return the response. - return response - - async def create_index(self, - request: Optional[Union[datastore_admin.CreateIndexRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates the specified index. A newly created index's initial - state is ``CREATING``. On completion of the returned - [google.longrunning.Operation][google.longrunning.Operation], - the state will be ``READY``. If the index already exists, the - call will return an ``ALREADY_EXISTS`` status. - - During index creation, the process could result in an error, in - which case the index will move to the ``ERROR`` state. The - process can be recovered by fixing the data that caused the - error, removing the index with - [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex], - then re-creating the index with [create] - [google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - - Indexes with a single property cannot be created. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - async def sample_create_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.CreateIndexRequest( - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.datastore_admin_v1.types.Index` - Datastore composite index definition. - - """ - # Create or coerce a protobuf request object. - request = datastore_admin.CreateIndexRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_index, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - index.Index, - metadata_type=datastore_admin.IndexOperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_index(self, - request: Optional[Union[datastore_admin.DeleteIndexRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes an existing index. An index can only be deleted if it is - in a ``READY`` or ``ERROR`` state. On successful execution of - the request, the index will be in a ``DELETING`` - [state][google.datastore.admin.v1.Index.State]. And on - completion of the returned - [google.longrunning.Operation][google.longrunning.Operation], - the index will be removed. - - During index deletion, the process could result in an error, in - which case the index will move to the ``ERROR`` state. The - process can be recovered by fixing the data that caused the - error, followed by calling - [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] - again. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - async def sample_delete_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.DeleteIndexRequest( - ) - - # Make the request - operation = client.delete_index(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.datastore_admin_v1.types.Index` - Datastore composite index definition. - - """ - # Create or coerce a protobuf request object. - request = datastore_admin.DeleteIndexRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_index, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("index_id", request.index_id), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - index.Index, - metadata_type=datastore_admin.IndexOperationMetadata, - ) - - # Done; return the response. - return response - - async def get_index(self, - request: Optional[Union[datastore_admin.GetIndexRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> index.Index: - r"""Gets an index. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import datastore_admin_v1
-
-            async def sample_get_index():
-                # Create a client
-                client = datastore_admin_v1.DatastoreAdminAsyncClient()
-
-                # Initialize request argument(s)
-                request = datastore_admin_v1.GetIndexRequest(
-                )
-
-                # Make the request
-                response = await client.get_index(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]]):
-                The request object. The request for
-                [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex].
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.datastore_admin_v1.types.Index:
-                Datastore composite index definition.
-        """
-        # Create or coerce a protobuf request object.
-        request = datastore_admin.GetIndexRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.get_index,
-            default_retry=retries.AsyncRetry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=60.0,
-            ),
-            default_timeout=60.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("project_id", request.project_id),
-                ("index_id", request.index_id),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_indexes(self,
-            request: Optional[Union[datastore_admin.ListIndexesRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.ListIndexesAsyncPager:
-        r"""Lists the indexes that match the specified filters.
-        Datastore uses an eventually consistent query to fetch
-        the list of indexes and may occasionally return stale
-        results.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import datastore_admin_v1
-
-            async def sample_list_indexes():
-                # Create a client
-                client = datastore_admin_v1.DatastoreAdminAsyncClient()
-
-                # Initialize request argument(s)
-                request = datastore_admin_v1.ListIndexesRequest(
-                )
-
-                # Make the request
-                page_result = client.list_indexes(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]]):
-                The request object. The request for
-                [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes].
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesAsyncPager:
-                The response for
-                [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        request = datastore_admin.ListIndexesRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.list_indexes,
-            default_retry=retries.AsyncRetry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=60.0,
-            ),
-            default_timeout=60.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("project_id", request.project_id),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListIndexesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_operations(
-        self,
-        request: Optional[operations_pb2.ListOperationsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> operations_pb2.ListOperationsResponse:
-        r"""Lists operations that match the specified filter in the request.
-
-        Args:
-            request (:class:`~.operations_pb2.ListOperationsRequest`):
-                The request object. Request message for
-                `ListOperations` method.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
- Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
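-        # Note: the RPC returns google.protobuf.Empty, so nothing is handed
-        # back to the caller; whether cancellation actually took effect must
-        # be observed separately (e.g. by polling get_operation).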
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "DatastoreAdminAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DatastoreAdminAsyncClient", -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/client.py deleted file mode 100644 index 5c493d23..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ /dev/null @@ -1,1616 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.datastore_admin_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.datastore_admin_v1.services.datastore_admin import pagers -from google.cloud.datastore_admin_v1.types import datastore_admin -from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .transports.base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DatastoreAdminGrpcTransport -from .transports.grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport -from .transports.rest import DatastoreAdminRestTransport - - -class DatastoreAdminClientMeta(type): - """Metaclass for the DatastoreAdmin client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
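-
-    A minimal sketch of how the registry is used (illustrative only; it
-    assumes the transport labels registered below):
-
-    .. code-block:: python
-
-        # "grpc", "grpc_asyncio" and "rest" are the registered labels;
-        # omitting the label returns the first entry ("grpc").
-        transport_cls = DatastoreAdminClient.get_transport_class("rest")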
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreAdminTransport]] - _transport_registry["grpc"] = DatastoreAdminGrpcTransport - _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport - _transport_registry["rest"] = DatastoreAdminRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DatastoreAdminTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DatastoreAdminClient(metaclass=DatastoreAdminClientMeta): - """Google Cloud Datastore Admin API - - The Datastore Admin API provides several admin services for - Cloud Datastore. - - Concepts: Project, namespace, kind, and entity as defined in the - Google Cloud Datastore API. - - Operation: An Operation represents work being performed in the - background. - - EntityFilter: Allows specifying a subset of entities in a - project. This is specified as a combination of kinds and - namespaces (either or both of which may be all). - - Export/Import Service: - - - The Export/Import service provides the ability to copy all or - a subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one - project and then import into another. - - Exported data can also be loaded into Google BigQuery for - analysis. - - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state - (including any errors encountered) of the export/import may be - queried via the Operation resource. - - Index Service: - - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - Operation Service: - - - The Operations collection provides a record of actions - performed for the specified project (including any operations - in progress). Operations are not created directly but through - calls on other collections or resources. - - An operation that is not yet done may be cancelled. The - request to cancel is asynchronous and the operation may - continue to run for some time after the request to cancel is - made. - - An operation that is done may be deleted so that it is no - longer listed as part of the Operation collection. - - ListOperations returns all pending operations, but not - completed operations. - - Operations are created by service DatastoreAdmin, but are - accessed via service google.longrunning.Operations. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "datastore.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "datastore.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatastoreAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DatastoreAdminTransport: - """Returns the transport used by the client instance. - - Returns: - DatastoreAdminTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. 
- - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = DatastoreAdminClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. 
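-
-        A sketch of the failure mode (illustrative only; the universe
-        domain shown is hypothetical):
-
-        .. code-block:: python
-
-            client = DatastoreAdminClient(
-                client_options={"universe_domain": "example-universe.test"}
-            )
-            # The first RPC raises ValueError if the credentials resolve
-            # to a different universe (e.g. "googleapis.com").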
- """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - DatastoreAdminClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatastoreAdminTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the datastore admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, DatastoreAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DatastoreAdminClient._read_environment_variables() - self._client_cert_source = DatastoreAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DatastoreAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DatastoreAdminTransport) - if transport_provided: - # transport is a DatastoreAdminTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(DatastoreAdminTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DatastoreAdminClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - def export_entities(self, - request: Optional[Union[datastore_admin.ExportEntitiesRequest, dict]] = None, - *, - project_id: Optional[str] = None, - labels: Optional[MutableMapping[str, str]] = None, - entity_filter: Optional[datastore_admin.EntityFilter] = None, - output_url_prefix: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Exports a copy of all or a subset of entities from - Google Cloud Datastore to another storage system, such - as Google Cloud Storage. Recent updates to entities may - not be reflected in the export. 
The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - def sample_export_entities(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ExportEntitiesRequest( - project_id="project_id_value", - output_url_prefix="output_url_prefix_value", - ) - - # Make the request - operation = client.export_entities(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - project_id (str): - Required. Project ID against which to - make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - labels (MutableMapping[str, str]): - Client-assigned labels. - This corresponds to the ``labels`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): - Description of what data from the - project is included in the export. - - This corresponds to the ``entity_filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_url_prefix (str): - Required. Location for the export metadata and data - files. - - The full resource URL of the external storage location. - Currently, only Google Cloud Storage is supported. So - output_url_prefix should be of the form: - ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where - ``BUCKET_NAME`` is the name of the Cloud Storage bucket - and ``NAMESPACE_PATH`` is an optional Cloud Storage - namespace path (this is not a Cloud Datastore - namespace). For more information about Cloud Storage - namespace paths, see `Object name - considerations `__. - - The resulting files will be nested deeper than the - specified URL prefix. The final output URL will be - provided in the - [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] - field. That value should be used for subsequent - ImportEntities operations. - - By nesting the data files deeper, the same Cloud Storage - bucket can be used in multiple ExportEntities operations - without conflict. - - This corresponds to the ``output_url_prefix`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.datastore_admin_v1.types.ExportEntitiesResponse` The response for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, labels, entity_filter, output_url_prefix]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ExportEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore_admin.ExportEntitiesRequest): - request = datastore_admin.ExportEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if labels is not None: - request.labels = labels - if entity_filter is not None: - request.entity_filter = entity_filter - if output_url_prefix is not None: - request.output_url_prefix = output_url_prefix - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_entities] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - datastore_admin.ExportEntitiesResponse, - metadata_type=datastore_admin.ExportEntitiesMetadata, - ) - - # Done; return the response. - return response - - def import_entities(self, - request: Optional[Union[datastore_admin.ImportEntitiesRequest, dict]] = None, - *, - project_id: Optional[str] = None, - labels: Optional[MutableMapping[str, str]] = None, - input_url: Optional[str] = None, - entity_filter: Optional[datastore_admin.EntityFilter] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Imports entities into Google Cloud Datastore. - Existing entities with the same key are overwritten. The - import occurs in the background and its progress can be - monitored and managed via the Operation resource that is - created. If an ImportEntities operation is cancelled, it - is possible that a subset of the data has already been - imported to Cloud Datastore. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - def sample_import_entities(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ImportEntitiesRequest( - project_id="project_id_value", - input_url="input_url_value", - ) - - # Make the request - operation = client.import_entities(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. - project_id (str): - Required. Project ID against which to - make the request. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - labels (MutableMapping[str, str]): - Client-assigned labels. - This corresponds to the ``labels`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_url (str): - Required. The full resource URL of the external storage - location. Currently, only Google Cloud Storage is - supported. So input_url should be of the form: - ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, - where ``BUCKET_NAME`` is the name of the Cloud Storage - bucket, ``NAMESPACE_PATH`` is an optional Cloud Storage - namespace path (this is not a Cloud Datastore - namespace), and ``OVERALL_EXPORT_METADATA_FILE`` is the - metadata file written by the ExportEntities operation. - For more information about Cloud Storage namespace - paths, see `Object name - considerations `__. - - For more information, see - [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. - - This corresponds to the ``input_url`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): - Optionally specify which kinds/namespaces are to be - imported. If provided, the list must be a subset of the - EntityFilter used in creating the export, otherwise a - FAILED_PRECONDITION error will be returned. If no filter - is specified then all entities from the export are - imported. - - This corresponds to the ``entity_filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, labels, input_url, entity_filter]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ImportEntitiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore_admin.ImportEntitiesRequest): - request = datastore_admin.ImportEntitiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if labels is not None: - request.labels = labels - if input_url is not None: - request.input_url = input_url - if entity_filter is not None: - request.entity_filter = entity_filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.import_entities] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=datastore_admin.ImportEntitiesMetadata, - ) - - # Done; return the response. - return response - - def create_index(self, - request: Optional[Union[datastore_admin.CreateIndexRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates the specified index. A newly created index's initial - state is ``CREATING``. On completion of the returned - [google.longrunning.Operation][google.longrunning.Operation], - the state will be ``READY``. If the index already exists, the - call will return an ``ALREADY_EXISTS`` status. - - During index creation, the process could result in an error, in - which case the index will move to the ``ERROR`` state. The - process can be recovered by fixing the data that caused the - error, removing the index with - [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex], - then re-creating the index with [create] - [google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - - Indexes with a single property cannot be created. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - def sample_create_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.CreateIndexRequest( - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.datastore_admin_v1.types.Index` - Datastore composite index definition. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.CreateIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore_admin.CreateIndexRequest): - request = datastore_admin.CreateIndexRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - index.Index, - metadata_type=datastore_admin.IndexOperationMetadata, - ) - - # Done; return the response. - return response - - def delete_index(self, - request: Optional[Union[datastore_admin.DeleteIndexRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes an existing index. An index can only be deleted if it is - in a ``READY`` or ``ERROR`` state. On successful execution of - the request, the index will be in a ``DELETING`` - [state][google.datastore.admin.v1.Index.State]. And on - completion of the returned - [google.longrunning.Operation][google.longrunning.Operation], - the index will be removed. - - During index deletion, the process could result in an error, in - which case the index will move to the ``ERROR`` state. The - process can be recovered by fixing the data that caused the - error, followed by calling - [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] - again. - - .. code-block:: python
- - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - def sample_delete_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.DeleteIndexRequest( - ) - - # Make the request - operation = client.delete_index(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.datastore_admin_v1.types.Index` - Datastore composite index definition. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.DeleteIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore_admin.DeleteIndexRequest): - request = datastore_admin.DeleteIndexRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("index_id", request.index_id), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - index.Index, - metadata_type=datastore_admin.IndexOperationMetadata, - ) - - # Done; return the response. - return response - - def get_index(self, - request: Optional[Union[datastore_admin.GetIndexRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> index.Index: - r"""Gets an index. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - def sample_get_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.GetIndexRequest( - ) - - # Make the request - response = client.get_index(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_admin_v1.types.Index: - Datastore composite index definition. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.GetIndexRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore_admin.GetIndexRequest): - request = datastore_admin.GetIndexRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_index] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("index_id", request.index_id), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_indexes(self, - request: Optional[Union[datastore_admin.ListIndexesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListIndexesPager: - r"""Lists the indexes that match the specified filters. - Datastore uses an eventually consistent query to fetch - the list of indexes and may occasionally return stale - results. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datastore_admin_v1 - - def sample_list_indexes(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ListIndexesRequest( - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]): - The request object. 
The request for - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesPager: - The response for - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datastore_admin.ListIndexesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datastore_admin.ListIndexesRequest): - request = datastore_admin.ListIndexesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_indexes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListIndexesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DatastoreAdminClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
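-        # (A note on the pattern, grounded in the surrounding code: unlike the
-        # generated service methods above, which look up pre-wrapped callables
-        # in ``self._transport._wrapped_methods``, the operations mixins here
-        # wrap the bare transport method at call time on every invocation.)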
- rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DatastoreAdminClient", -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py deleted file mode 100644 index 134edc43..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.datastore_admin_v1.types import datastore_admin -from google.cloud.datastore_admin_v1.types import index - - -class ListIndexesPager: - """A pager for iterating through ``list_indexes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datastore_admin_v1.types.ListIndexesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``indexes`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListIndexes`` requests and continue to iterate - through the ``indexes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datastore_admin_v1.types.ListIndexesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datastore_admin.ListIndexesResponse], - request: datastore_admin.ListIndexesRequest, - response: datastore_admin.ListIndexesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datastore_admin_v1.types.ListIndexesRequest): - The initial request object. - response (google.cloud.datastore_admin_v1.types.ListIndexesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datastore_admin.ListIndexesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datastore_admin.ListIndexesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[index.Index]: - for page in self.pages: - yield from page.indexes - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListIndexesAsyncPager: - """A pager for iterating through ``list_indexes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datastore_admin_v1.types.ListIndexesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``indexes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListIndexes`` requests and continue to iterate - through the ``indexes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datastore_admin_v1.types.ListIndexesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datastore_admin.ListIndexesResponse]], - request: datastore_admin.ListIndexesRequest, - response: datastore_admin.ListIndexesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.datastore_admin_v1.types.ListIndexesRequest): - The initial request object. - response (google.cloud.datastore_admin_v1.types.ListIndexesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datastore_admin.ListIndexesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datastore_admin.ListIndexesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[index.Index]: - async def async_generator(): - async for page in self.pages: - for response in page.indexes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py deleted file mode 100644 index b5ea677e..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DatastoreAdminTransport -from .grpc import DatastoreAdminGrpcTransport -from .grpc_asyncio import DatastoreAdminGrpcAsyncIOTransport -from .rest import DatastoreAdminRestTransport -from .rest import DatastoreAdminRestInterceptor - - -# Compile a registry of transports. 
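-# (The client resolves its ``transport`` constructor argument, for example
-# ``DatastoreAdminClient(transport="rest")``, against this registry; the
-# string keys below are the supported transport names.)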
-_transport_registry = OrderedDict() # type: Dict[str, Type[DatastoreAdminTransport]] -_transport_registry['grpc'] = DatastoreAdminGrpcTransport -_transport_registry['grpc_asyncio'] = DatastoreAdminGrpcAsyncIOTransport -_transport_registry['rest'] = DatastoreAdminRestTransport - -__all__ = ( - 'DatastoreAdminTransport', - 'DatastoreAdminGrpcTransport', - 'DatastoreAdminGrpcAsyncIOTransport', - 'DatastoreAdminRestTransport', - 'DatastoreAdminRestInterceptor', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py deleted file mode 100644 index 7d9d5703..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ /dev/null @@ -1,281 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.datastore_admin_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.datastore_admin_v1.types import datastore_admin -from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DatastoreAdminTransport(abc.ABC): - """Abstract transport class for DatastoreAdmin.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', - ) - - DEFAULT_HOST: str = 'datastore.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'datastore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self-signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self-signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.export_entities: gapic_v1.method.wrap_method( - self.export_entities, - default_timeout=60.0, - client_info=client_info, - ), - self.import_entities: gapic_v1.method.wrap_method( - self.import_entities, - default_timeout=60.0, - client_info=client_info, - ), - self.create_index: gapic_v1.method.wrap_method( - self.create_index, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_index: gapic_v1.method.wrap_method( - self.delete_index, - default_timeout=60.0, - client_info=client_info, - ), - self.get_index: gapic_v1.method.wrap_method( - self.get_index, - default_retry=retries.Retry( - initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_indexes: gapic_v1.method.wrap_method( - self.list_indexes, - default_retry=retries.Retry( - initial=0.1, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning::
- Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def export_entities(self) -> Callable[ - [datastore_admin.ExportEntitiesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def import_entities(self) -> Callable[ - [datastore_admin.ImportEntitiesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_index(self) -> Callable[ - [datastore_admin.CreateIndexRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_index(self) -> Callable[ - [datastore_admin.DeleteIndexRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_index(self) -> Callable[ - [datastore_admin.GetIndexRequest], - Union[ - index.Index, - Awaitable[index.Index] - ]]: - raise NotImplementedError() - - @property - def list_indexes(self) -> Callable[ - [datastore_admin.ListIndexesRequest], - Union[ - datastore_admin.ListIndexesResponse, - Awaitable[datastore_admin.ListIndexesResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DatastoreAdminTransport', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py deleted file mode 100644 index f035a8a2..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ /dev/null @@ -1,584 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.datastore_admin_v1.types import datastore_admin -from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore -from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO - - -class DatastoreAdminGrpcTransport(DatastoreAdminTransport): - """gRPC backend transport for DatastoreAdmin. - - Google Cloud Datastore Admin API - - The Datastore Admin API provides several admin services for - Cloud Datastore. - - Concepts: Project, namespace, kind, and entity as defined in the - Google Cloud Datastore API. - - Operation: An Operation represents work being performed in the - background. - - EntityFilter: Allows specifying a subset of entities in a - project. This is specified as a combination of kinds and - namespaces (either or both of which may be all). - - Export/Import Service: - - - The Export/Import service provides the ability to copy all or - a subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one - project and then import into another. - - Exported data can also be loaded into Google BigQuery for - analysis. - - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state - (including any errors encountered) of the export/import may be - queried via the Operation resource. - - Index Service: - - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - Operation Service: - - - The Operations collection provides a record of actions - performed for the specified project (including any operations - in progress). Operations are not created directly but through - calls on other collections or resources. - - An operation that is not yet done may be cancelled. The - request to cancel is asynchronous and the operation may - continue to run for some time after the request to cancel is - made. - - An operation that is done may be deleted so that it is no - longer listed as part of the Operation collection. - - ListOperations returns all pending operations, but not - completed operations. - - Operations are created by service DatastoreAdmin, but are - accessed via service google.longrunning.Operations. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
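-
-    For illustration, a client can select this transport by name; a minimal
-    sketch, assuming application default credentials are available in the
-    environment (``grpc`` is also the default choice):
-
-    .. code-block:: python
-
-        from google.cloud import datastore_admin_v1
-
-        client = datastore_admin_v1.DatastoreAdminClient(transport="grpc")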
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'datastore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'datastore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datastore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. 
- - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def export_entities(self) -> Callable[ - [datastore_admin.ExportEntitiesRequest], - operations_pb2.Operation]: - r"""Return a callable for the export entities method over gRPC. - - Exports a copy of all or a subset of entities from - Google Cloud Datastore to another storage system, such - as Google Cloud Storage. Recent updates to entities may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - Returns: - Callable[[~.ExportEntitiesRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'export_entities' not in self._stubs: - self._stubs['export_entities'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/ExportEntities', - request_serializer=datastore_admin.ExportEntitiesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_entities'] - - @property - def import_entities(self) -> Callable[ - [datastore_admin.ImportEntitiesRequest], - operations_pb2.Operation]: - r"""Return a callable for the import entities method over gRPC. - - Imports entities into Google Cloud Datastore. - Existing entities with the same key are overwritten. The - import occurs in the background and its progress can be - monitored and managed via the Operation resource that is - created. If an ImportEntities operation is cancelled, it - is possible that a subset of the data has already been - imported to Cloud Datastore. - - Returns: - Callable[[~.ImportEntitiesRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
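-        # (Each stub is created once per channel and cached in ``self._stubs``,
-        # keyed by method name, so repeated property accesses reuse the same
-        # callable.)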
- if 'import_entities' not in self._stubs: - self._stubs['import_entities'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/ImportEntities', - request_serializer=datastore_admin.ImportEntitiesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_entities'] - - @property - def create_index(self) -> Callable[ - [datastore_admin.CreateIndexRequest], - operations_pb2.Operation]: - r"""Return a callable for the create index method over gRPC. - - Creates the specified index. A newly created index's initial - state is ``CREATING``. On completion of the returned - [google.longrunning.Operation][google.longrunning.Operation], - the state will be ``READY``. If the index already exists, the - call will return an ``ALREADY_EXISTS`` status. - - During index creation, the process could result in an error, in - which case the index will move to the ``ERROR`` state. The - process can be recovered by fixing the data that caused the - error, removing the index with - [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex], - then re-creating the index with [create] - [google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - - Indexes with a single property cannot be created. - - Returns: - Callable[[~.CreateIndexRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_index' not in self._stubs: - self._stubs['create_index'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/CreateIndex', - request_serializer=datastore_admin.CreateIndexRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_index'] - - @property - def delete_index(self) -> Callable[ - [datastore_admin.DeleteIndexRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete index method over gRPC. - - Deletes an existing index. An index can only be deleted if it is - in a ``READY`` or ``ERROR`` state. On successful execution of - the request, the index will be in a ``DELETING`` - [state][google.datastore.admin.v1.Index.State]. And on - completion of the returned - [google.longrunning.Operation][google.longrunning.Operation], - the index will be removed. - - During index deletion, the process could result in an error, in - which case the index will move to the ``ERROR`` state. The - process can be recovered by fixing the data that caused the - error, followed by calling - [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] - again. - - Returns: - Callable[[~.DeleteIndexRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_index' not in self._stubs: - self._stubs['delete_index'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/DeleteIndex', - request_serializer=datastore_admin.DeleteIndexRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_index'] - - @property - def get_index(self) -> Callable[ - [datastore_admin.GetIndexRequest], - index.Index]: - r"""Return a callable for the get index method over gRPC. - - Gets an index. - - Returns: - Callable[[~.GetIndexRequest], - ~.Index]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_index' not in self._stubs: - self._stubs['get_index'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/GetIndex', - request_serializer=datastore_admin.GetIndexRequest.serialize, - response_deserializer=index.Index.deserialize, - ) - return self._stubs['get_index'] - - @property - def list_indexes(self) -> Callable[ - [datastore_admin.ListIndexesRequest], - datastore_admin.ListIndexesResponse]: - r"""Return a callable for the list indexes method over gRPC. - - Lists the indexes that match the specified filters. - Datastore uses an eventually consistent query to fetch - the list of indexes and may occasionally return stale - results. - - Returns: - Callable[[~.ListIndexesRequest], - ~.ListIndexesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_indexes' not in self._stubs: - self._stubs['list_indexes'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/ListIndexes', - request_serializer=datastore_admin.ListIndexesRequest.serialize, - response_deserializer=datastore_admin.ListIndexesResponse.deserialize, - ) - return self._stubs['list_indexes'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DatastoreAdminGrpcTransport', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py deleted file mode 100644 index cac7c2ea..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,583 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.datastore_admin_v1.types import datastore_admin -from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore -from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import DatastoreAdminGrpcTransport - - -class DatastoreAdminGrpcAsyncIOTransport(DatastoreAdminTransport): - """gRPC AsyncIO backend transport for DatastoreAdmin. - - Google Cloud Datastore Admin API - - The Datastore Admin API provides several admin services for - Cloud Datastore. - - Concepts: Project, namespace, kind, and entity as defined in the - Google Cloud Datastore API. - - Operation: An Operation represents work being performed in the - background. - - EntityFilter: Allows specifying a subset of entities in a - project. This is specified as a combination of kinds and - namespaces (either or both of which may be all). - - Export/Import Service: - - - The Export/Import service provides the ability to copy all or - a subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one - project and then import into another. - - Exported data can also be loaded into Google BigQuery for - analysis. - - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state - (including any errors encountered) of the export/import may be - queried via the Operation resource. - - Index Service: - - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. - - Operation Service: - - - The Operations collection provides a record of actions - performed for the specified project (including any operations - in progress). Operations are not created directly but through - calls on other collections or resources. - - An operation that is not yet done may be cancelled. The - request to cancel is asynchronous and the operation may - continue to run for some time after the request to cancel is - made. - - An operation that is done may be deleted so that it is no - longer listed as part of the Operation collection. - - ListOperations returns all pending operations, but not - completed operations. - - Operations are created by service DatastoreAdmin, but are - accessed via service google.longrunning.Operations. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'datastore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'datastore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'datastore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def export_entities(self) -> Callable[ - [datastore_admin.ExportEntitiesRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the export entities method over gRPC. - - Exports a copy of all or a subset of entities from - Google Cloud Datastore to another storage system, such - as Google Cloud Storage. Recent updates to entities may - not be reflected in the export. The export occurs in the - background and its progress can be monitored and managed - via the Operation resource that is created. The output - of an export may only be used once the associated - operation is done. If an export operation is cancelled - before completion it may leave partial data behind in - Google Cloud Storage. - - Returns: - Callable[[~.ExportEntitiesRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'export_entities' not in self._stubs: - self._stubs['export_entities'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/ExportEntities', - request_serializer=datastore_admin.ExportEntitiesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_entities'] - - @property - def import_entities(self) -> Callable[ - [datastore_admin.ImportEntitiesRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the import entities method over gRPC. - - Imports entities into Google Cloud Datastore. - Existing entities with the same key are overwritten. The - import occurs in the background and its progress can be - monitored and managed via the Operation resource that is - created. If an ImportEntities operation is cancelled, it - is possible that a subset of the data has already been - imported to Cloud Datastore. - - Returns: - Callable[[~.ImportEntitiesRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
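The ``operations_client`` property above lazily wraps the same channel in an ``operations_v1.OperationsAsyncClient`` so callers can poll the long-running operations returned by the export/import and index RPCs. A hedged polling sketch (``wait_done`` and the operation name are illustrative, not part of the generated surface):

.. code-block:: python

    import asyncio

    from google.api_core import operations_v1

    async def wait_done(channel, name: str, interval: float = 5.0):
        # Re-fetch the Operation until the server marks it done.
        ops = operations_v1.OperationsAsyncClient(channel)
        while True:
            op = await ops.get_operation(name=name)
            if op.done:
                return op
            await asyncio.sleep(interval)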
- if 'import_entities' not in self._stubs: - self._stubs['import_entities'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/ImportEntities', - request_serializer=datastore_admin.ImportEntitiesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_entities'] - - @property - def create_index(self) -> Callable[ - [datastore_admin.CreateIndexRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create index method over gRPC. - - Creates the specified index. A newly created index's initial - state is ``CREATING``. On completion of the returned - [google.longrunning.Operation][google.longrunning.Operation], - the state will be ``READY``. If the index already exists, the - call will return an ``ALREADY_EXISTS`` status. - - During index creation, the process could result in an error, in - which case the index will move to the ``ERROR`` state. The - process can be recovered by fixing the data that caused the - error, removing the index with - [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex], - then re-creating the index with [create] - [google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - - Indexes with a single property cannot be created. - - Returns: - Callable[[~.CreateIndexRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_index' not in self._stubs: - self._stubs['create_index'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/CreateIndex', - request_serializer=datastore_admin.CreateIndexRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_index'] - - @property - def delete_index(self) -> Callable[ - [datastore_admin.DeleteIndexRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete index method over gRPC. - - Deletes an existing index. An index can only be deleted if it is - in a ``READY`` or ``ERROR`` state. On successful execution of - the request, the index will be in a ``DELETING`` - [state][google.datastore.admin.v1.Index.State]. And on - completion of the returned - [google.longrunning.Operation][google.longrunning.Operation], - the index will be removed. - - During index deletion, the process could result in an error, in - which case the index will move to the ``ERROR`` state. The - process can be recovered by fixing the data that caused the - error, followed by calling - [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] - again. - - Returns: - Callable[[~.DeleteIndexRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_index' not in self._stubs: - self._stubs['delete_index'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/DeleteIndex', - request_serializer=datastore_admin.DeleteIndexRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_index'] - - @property - def get_index(self) -> Callable[ - [datastore_admin.GetIndexRequest], - Awaitable[index.Index]]: - r"""Return a callable for the get index method over gRPC. - - Gets an index. - - Returns: - Callable[[~.GetIndexRequest], - Awaitable[~.Index]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_index' not in self._stubs: - self._stubs['get_index'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/GetIndex', - request_serializer=datastore_admin.GetIndexRequest.serialize, - response_deserializer=index.Index.deserialize, - ) - return self._stubs['get_index'] - - @property - def list_indexes(self) -> Callable[ - [datastore_admin.ListIndexesRequest], - Awaitable[datastore_admin.ListIndexesResponse]]: - r"""Return a callable for the list indexes method over gRPC. - - Lists the indexes that match the specified filters. - Datastore uses an eventually consistent query to fetch - the list of indexes and may occasionally return stale - results. - - Returns: - Callable[[~.ListIndexesRequest], - Awaitable[~.ListIndexesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_indexes' not in self._stubs: - self._stubs['list_indexes'] = self.grpc_channel.unary_unary( - '/google.datastore.admin.v1.DatastoreAdmin/ListIndexes', - request_serializer=datastore_admin.ListIndexesRequest.serialize, - response_deserializer=datastore_admin.ListIndexesResponse.deserialize, - ) - return self._stubs['list_indexes'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'DatastoreAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py deleted file mode 100644 index 84775245..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/services/datastore_admin/transports/rest.py +++ /dev/null @@ -1,1271 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.cloud.datastore_admin_v1.types import datastore_admin -from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore - -from .base import DatastoreAdminTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class DatastoreAdminRestInterceptor: - """Interceptor for DatastoreAdmin. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DatastoreAdminRestTransport. - - .. 
code-block:: python - class MyCustomDatastoreAdminInterceptor(DatastoreAdminRestInterceptor): - def pre_create_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_index(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_index(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_export_entities(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_export_entities(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_index(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_index(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_import_entities(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_import_entities(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_indexes(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_indexes(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DatastoreAdminRestTransport(interceptor=MyCustomDatastoreAdminInterceptor()) - client = DatastoreAdminClient(transport=transport) - - - """ - def pre_create_index(self, request: datastore_admin.CreateIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore_admin.CreateIndexRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. - """ - return request, metadata - - def post_create_index(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_index - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - def pre_delete_index(self, request: datastore_admin.DeleteIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore_admin.DeleteIndexRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. - """ - return request, metadata - - def post_delete_index(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_index - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - def pre_export_entities(self, request: datastore_admin.ExportEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore_admin.ExportEntitiesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for export_entities - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. 
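Beyond the logging shown in the docstring, a pre-RPC hook may rewrite the request or append call metadata, since whatever tuple it returns is what the transport actually sends. A hedged example that tags every ``ListIndexes`` call with an extra header (the header name is made up; the import path is the published location of this staged module):

.. code-block:: python

    from typing import Sequence, Tuple

    from google.cloud.datastore_admin_v1.services.datastore_admin.transports.rest import (
        DatastoreAdminRestInterceptor,
    )

    class TaggingInterceptor(DatastoreAdminRestInterceptor):
        def pre_list_indexes(self, request, metadata: Sequence[Tuple[str, str]]):
            # Metadata pairs become HTTP headers on the outgoing request.
            return request, list(metadata) + [("x-client-tag", "audit")]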
- """ - return request, metadata - - def post_export_entities(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for export_entities - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - def pre_get_index(self, request: datastore_admin.GetIndexRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore_admin.GetIndexRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_index - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. - """ - return request, metadata - - def post_get_index(self, response: index.Index) -> index.Index: - """Post-rpc interceptor for get_index - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - def pre_import_entities(self, request: datastore_admin.ImportEntitiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore_admin.ImportEntitiesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for import_entities - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. - """ - return request, metadata - - def post_import_entities(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for import_entities - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - def pre_list_indexes(self, request: datastore_admin.ListIndexesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[datastore_admin.ListIndexesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_indexes - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. - """ - return request, metadata - - def post_list_indexes(self, response: datastore_admin.ListIndexesResponse) -> datastore_admin.ListIndexesResponse: - """Post-rpc interceptor for list_indexes - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. 
- """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatastoreAdmin server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the DatastoreAdmin server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DatastoreAdminRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DatastoreAdminRestInterceptor - - -class DatastoreAdminRestTransport(DatastoreAdminTransport): - """REST backend transport for DatastoreAdmin. - - Google Cloud Datastore Admin API - - The Datastore Admin API provides several admin services for - Cloud Datastore. - - Concepts: Project, namespace, kind, and entity as defined in the - Google Cloud Datastore API. - - Operation: An Operation represents work being performed in the - background. - - EntityFilter: Allows specifying a subset of entities in a - project. This is specified as a combination of kinds and - namespaces (either or both of which may be all). - - Export/Import Service: - - - The Export/Import service provides the ability to copy all or - a subset of entities to/from Google Cloud Storage. - - Exported data may be imported into Cloud Datastore for any - Google Cloud Platform project. It is not restricted to the - export source project. It is possible to export from one - project and then import into another. - - Exported data can also be loaded into Google BigQuery for - analysis. - - Exports and imports are performed asynchronously. An Operation - resource is created for each export/import. The state - (including any errors encountered) of the export/import may be - queried via the Operation resource. - - Index Service: - - - The index service manages Cloud Datastore composite indexes. - - Index creation and deletion are performed asynchronously. An - Operation resource is created for each such asynchronous - operation. The state of the operation (including any errors - encountered) may be queried via the Operation resource. 
- - Operation Service: - - - The Operations collection provides a record of actions - performed for the specified project (including any operations - in progress). Operations are not created directly but through - calls on other collections or resources. - - An operation that is not yet done may be cancelled. The - request to cancel is asynchronous and the operation may - continue to run for some time after the request to cancel is - made. - - An operation that is done may be deleted so that it is no - longer listed as part of the Operation collection. - - ListOperations returns all pending operations, but not - completed operations. - - Operations are created by service DatastoreAdmin, but are - accessed via service google.longrunning.Operations. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'datastore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DatastoreAdminRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'datastore.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or DatastoreAdminRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def operations_client(self) -> operations_v1.AbstractOperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            http_options: Dict[str, List[Dict[str, str]]] = {
-                'google.longrunning.Operations.CancelOperation': [
-                    {
-                        'method': 'post',
-                        'uri': '/v1/{name=projects/*/operations/*}:cancel',
-                    },
-                ],
-                'google.longrunning.Operations.DeleteOperation': [
-                    {
-                        'method': 'delete',
-                        'uri': '/v1/{name=projects/*/operations/*}',
-                    },
-                ],
-                'google.longrunning.Operations.GetOperation': [
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/operations/*}',
-                    },
-                ],
-                'google.longrunning.Operations.ListOperations': [
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*}/operations',
-                    },
-                ],
-            }
-
-            rest_transport = operations_v1.OperationsRestTransport(
-                host=self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                scopes=self._scopes,
-                http_options=http_options,
-                path_prefix="v1")
-
-            self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport)
-
-        # Return the client from cache.
-        return self._operations_client
-
-    class _CreateIndex(DatastoreAdminRestStub):
-        def __hash__(self):
-            return hash("CreateIndex")
-
-        def __call__(self,
-                request: datastore_admin.CreateIndexRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> operations_pb2.Operation:
-            r"""Call the create index method over HTTP.
-
-            Args:
-                request (~.datastore_admin.CreateIndexRequest):
-                    The request object. The request for
-                    [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex].
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.operations_pb2.Operation:
-                    This resource represents a
-                long-running operation that is the
-                result of a network API call.
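The scheme-defaulting logic in the constructor above is easy to check in isolation. A standalone re-implementation (not the transport's own helper) showing that a scheme is only prepended when the caller omitted one:

.. code-block:: python

    import re

    def normalize_host(host: str, url_scheme: str = "https") -> str:
        m = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        assert m is not None  # both groups are optional, so this always matches
        return host if m.group("scheme") else f"{url_scheme}://{host}"

    print(normalize_host("datastore.googleapis.com"))  # https://datastore.googleapis.com
    print(normalize_host("http://localhost:8080"))     # unchanged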
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/indexes', - 'body': 'index', - }, - ] - request, metadata = self._interceptor.pre_create_index(request, metadata) - pb_request = datastore_admin.CreateIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_index(resp) - return resp - - class _DeleteIndex(DatastoreAdminRestStub): - def __hash__(self): - return hash("DeleteIndex") - - def __call__(self, - request: datastore_admin.DeleteIndexRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete index method over HTTP. - - Args: - request (~.datastore_admin.DeleteIndexRequest): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/projects/{project_id}/indexes/{index_id}', - }, - ] - request, metadata = self._interceptor.pre_delete_index(request, metadata) - pb_request = datastore_admin.DeleteIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_index(resp) - return resp - - class _ExportEntities(DatastoreAdminRestStub): - def __hash__(self): - return hash("ExportEntities") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore_admin.ExportEntitiesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the export entities method over HTTP. - - Args: - request (~.datastore_admin.ExportEntitiesRequest): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:export', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_export_entities(request, metadata) - pb_request = datastore_admin.ExportEntitiesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_entities(resp) - return resp - - class _GetIndex(DatastoreAdminRestStub): - def __hash__(self): - return hash("GetIndex") - - def __call__(self, - request: datastore_admin.GetIndexRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> index.Index: - r"""Call the get index method over HTTP. - - Args: - request (~.datastore_admin.GetIndexRequest): - The request object. 
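The ``status_code >= 400`` branch repeated in every stub maps HTTP failures onto ``google.api_core.exceptions`` types rather than raw ``requests`` errors, so callers catch the same exception classes regardless of transport. Reduced to a sketch:

.. code-block:: python

    import requests
    from google.api_core import exceptions as core_exceptions

    def raise_for_api_error(response: requests.Response) -> requests.Response:
        if response.status_code >= 400:
            # e.g. 404 -> NotFound, 403 -> PermissionDenied, 409 -> Conflict.
            raise core_exceptions.from_http_response(response)
        return response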
The request for - [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.index.Index: - Datastore composite index definition. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/indexes/{index_id}', - }, - ] - request, metadata = self._interceptor.pre_get_index(request, metadata) - pb_request = datastore_admin.GetIndexRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = index.Index() - pb_resp = index.Index.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_index(resp) - return resp - - class _ImportEntities(DatastoreAdminRestStub): - def __hash__(self): - return hash("ImportEntities") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: datastore_admin.ImportEntitiesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the import entities method over HTTP. - - Args: - request (~.datastore_admin.ImportEntitiesRequest): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}:import', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_import_entities(request, metadata) - pb_request = datastore_admin.ImportEntitiesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_import_entities(resp) - return resp - - class _ListIndexes(DatastoreAdminRestStub): - def __hash__(self): - return hash("ListIndexes") - - def __call__(self, - request: datastore_admin.ListIndexesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> datastore_admin.ListIndexesResponse: - r"""Call the list indexes method over HTTP. - - Args: - request (~.datastore_admin.ListIndexesRequest): - The request object. The request for - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.datastore_admin.ListIndexesResponse: - The response for - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/indexes', - }, - ] - request, metadata = self._interceptor.pre_list_indexes(request, metadata) - pb_request = datastore_admin.ListIndexesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datastore_admin.ListIndexesResponse() - pb_resp = datastore_admin.ListIndexesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_indexes(resp) - return resp - - @property - def create_index(self) -> Callable[ - [datastore_admin.CreateIndexRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_index(self) -> Callable[ - [datastore_admin.DeleteIndexRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_entities(self) -> Callable[ - [datastore_admin.ExportEntitiesRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportEntities(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_index(self) -> Callable[ - [datastore_admin.GetIndexRequest], - index.Index]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIndex(self._session, self._host, self._interceptor) # type: ignore - - @property - def import_entities(self) -> Callable[ - [datastore_admin.ImportEntitiesRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ImportEntities(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_indexes(self) -> Callable[ - [datastore_admin.ListIndexesRequest], - datastore_admin.ListIndexesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListIndexes(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(DatastoreAdminRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/operations/*}:cancel', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(DatastoreAdminRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
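(Every stub in this transport follows the same pattern: match the request against http_options, then split it into URI, method, and query params. Below is a small illustrative sketch of what path_template.transcode produces for these operation stubs; the operation name is a placeholder.)

from google.api_core import path_template

http_options = [{
    'method': 'delete',
    'uri': '/v1/{name=projects/*/operations/*}',
}]
# transcode substitutes request fields into the URI template.
transcoded = path_template.transcode(
    http_options, name='projects/my-project/operations/op-123'
)
assert transcoded['method'] == 'delete'
assert transcoded['uri'] == '/v1/projects/my-project/operations/op-123'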
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(DatastoreAdminRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(DatastoreAdminRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/operations', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DatastoreAdminRestTransport', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/__init__.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/__init__.py deleted file mode 100644 index ae34c30d..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .datastore_admin import ( - CommonMetadata, - CreateIndexRequest, - DatastoreFirestoreMigrationMetadata, - DeleteIndexRequest, - EntityFilter, - ExportEntitiesMetadata, - ExportEntitiesRequest, - ExportEntitiesResponse, - GetIndexRequest, - ImportEntitiesMetadata, - ImportEntitiesRequest, - IndexOperationMetadata, - ListIndexesRequest, - ListIndexesResponse, - Progress, - OperationType, -) -from .index import ( - Index, -) -from .migration import ( - MigrationProgressEvent, - MigrationStateEvent, - MigrationState, - MigrationStep, -) - -__all__ = ( - 'CommonMetadata', - 'CreateIndexRequest', - 'DatastoreFirestoreMigrationMetadata', - 'DeleteIndexRequest', - 'EntityFilter', - 'ExportEntitiesMetadata', - 'ExportEntitiesRequest', - 'ExportEntitiesResponse', - 'GetIndexRequest', - 'ImportEntitiesMetadata', - 'ImportEntitiesRequest', - 'IndexOperationMetadata', - 'ListIndexesRequest', - 'ListIndexesResponse', - 'Progress', - 'OperationType', - 'Index', - 'MigrationProgressEvent', - 'MigrationStateEvent', - 'MigrationState', - 'MigrationStep', -) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/datastore_admin.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/datastore_admin.py deleted file mode 100644 index c2203f05..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ /dev/null @@ -1,639 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datastore_admin_v1.types import index as gda_index -from google.cloud.datastore_admin_v1.types import migration -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.datastore.admin.v1', - manifest={ - 'OperationType', - 'CommonMetadata', - 'Progress', - 'ExportEntitiesRequest', - 'ImportEntitiesRequest', - 'ExportEntitiesResponse', - 'ExportEntitiesMetadata', - 'ImportEntitiesMetadata', - 'EntityFilter', - 'CreateIndexRequest', - 'DeleteIndexRequest', - 'GetIndexRequest', - 'ListIndexesRequest', - 'ListIndexesResponse', - 'IndexOperationMetadata', - 'DatastoreFirestoreMigrationMetadata', - }, -) - - -class OperationType(proto.Enum): - r"""Operation types. - - Values: - OPERATION_TYPE_UNSPECIFIED (0): - Unspecified. - EXPORT_ENTITIES (1): - ExportEntities. - IMPORT_ENTITIES (2): - ImportEntities. - CREATE_INDEX (3): - CreateIndex. - DELETE_INDEX (4): - DeleteIndex. - """ - OPERATION_TYPE_UNSPECIFIED = 0 - EXPORT_ENTITIES = 1 - IMPORT_ENTITIES = 2 - CREATE_INDEX = 3 - DELETE_INDEX = 4 - - -class CommonMetadata(proto.Message): - r"""Metadata common to all Datastore Admin operations. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time that work began on the operation. 
- end_time (google.protobuf.timestamp_pb2.Timestamp): - The time the operation ended, either - successfully or otherwise. - operation_type (google.cloud.datastore_admin_v1.types.OperationType): - The type of the operation. Can be used as a - filter in ListOperationsRequest. - labels (MutableMapping[str, str]): - The client-assigned labels which were - provided when the operation was created. May - also include additional labels. - state (google.cloud.datastore_admin_v1.types.CommonMetadata.State): - The current state of the Operation. - """ - class State(proto.Enum): - r"""The various possible states for an ongoing Operation. - - Values: - STATE_UNSPECIFIED (0): - Unspecified. - INITIALIZING (1): - Request is being prepared for processing. - PROCESSING (2): - Request is actively being processed. - CANCELLING (3): - Request is in the process of being cancelled - after user called - google.longrunning.Operations.CancelOperation on - the operation. - FINALIZING (4): - Request has been processed and is in its - finalization stage. - SUCCESSFUL (5): - Request has completed successfully. - FAILED (6): - Request has finished being processed, but - encountered an error. - CANCELLED (7): - Request has finished being cancelled after - user called - google.longrunning.Operations.CancelOperation. - """ - STATE_UNSPECIFIED = 0 - INITIALIZING = 1 - PROCESSING = 2 - CANCELLING = 3 - FINALIZING = 4 - SUCCESSFUL = 5 - FAILED = 6 - CANCELLED = 7 - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - operation_type: 'OperationType' = proto.Field( - proto.ENUM, - number=3, - enum='OperationType', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - - -class Progress(proto.Message): - r"""Measures the progress of a particular metric. - - Attributes: - work_completed (int): - The amount of work that has been completed. Note that this - may be greater than work_estimated. - work_estimated (int): - An estimate of how much work needs to be - performed. May be zero if the work estimate is - unavailable. - """ - - work_completed: int = proto.Field( - proto.INT64, - number=1, - ) - work_estimated: int = proto.Field( - proto.INT64, - number=2, - ) - - -class ExportEntitiesRequest(proto.Message): - r"""The request for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - - Attributes: - project_id (str): - Required. Project ID against which to make - the request. - labels (MutableMapping[str, str]): - Client-assigned labels. - entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): - Description of what data from the project is - included in the export. - output_url_prefix (str): - Required. Location for the export metadata and data files. - - The full resource URL of the external storage location. - Currently, only Google Cloud Storage is supported. So - output_url_prefix should be of the form: - ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` - is the name of the Cloud Storage bucket and - ``NAMESPACE_PATH`` is an optional Cloud Storage namespace - path (this is not a Cloud Datastore namespace). For more - information about Cloud Storage namespace paths, see `Object - name - considerations <https://cloud.google.com/storage/docs/objects#naming>`__.
- - The resulting files will be nested deeper than the specified - URL prefix. The final output URL will be provided in the - [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] - field. That value should be used for subsequent - ImportEntities operations. - - By nesting the data files deeper, the same Cloud Storage - bucket can be used in multiple ExportEntities operations - without conflict. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - entity_filter: 'EntityFilter' = proto.Field( - proto.MESSAGE, - number=3, - message='EntityFilter', - ) - output_url_prefix: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ImportEntitiesRequest(proto.Message): - r"""The request for - [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. - - Attributes: - project_id (str): - Required. Project ID against which to make - the request. - labels (MutableMapping[str, str]): - Client-assigned labels. - input_url (str): - Required. The full resource URL of the external storage - location. Currently, only Google Cloud Storage is supported. - So input_url should be of the form: - ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, - where ``BUCKET_NAME`` is the name of the Cloud Storage - bucket, ``NAMESPACE_PATH`` is an optional Cloud Storage - namespace path (this is not a Cloud Datastore namespace), - and ``OVERALL_EXPORT_METADATA_FILE`` is the metadata file - written by the ExportEntities operation. For more - information about Cloud Storage namespace paths, see `Object - name - considerations <https://cloud.google.com/storage/docs/objects#naming>`__. - - For more information, see - [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. - entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): - Optionally specify which kinds/namespaces are to be - imported. If provided, the list must be a subset of the - EntityFilter used in creating the export, otherwise a - FAILED_PRECONDITION error will be returned. If no filter is - specified then all entities from the export are imported. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - input_url: str = proto.Field( - proto.STRING, - number=3, - ) - entity_filter: 'EntityFilter' = proto.Field( - proto.MESSAGE, - number=4, - message='EntityFilter', - ) - - -class ExportEntitiesResponse(proto.Message): - r"""The response for - [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. - - Attributes: - output_url (str): - Location of the output metadata file. This can be used to - begin an import into Cloud Datastore (this project or - another project). See - [google.datastore.admin.v1.ImportEntitiesRequest.input_url][google.datastore.admin.v1.ImportEntitiesRequest.input_url]. - Only present if the operation completed successfully. - """ - - output_url: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ExportEntitiesMetadata(proto.Message): - r"""Metadata for ExportEntities operations. - - Attributes: - common (google.cloud.datastore_admin_v1.types.CommonMetadata): - Metadata common to all Datastore Admin - operations.
- progress_entities (google.cloud.datastore_admin_v1.types.Progress): - An estimate of the number of entities - processed. - progress_bytes (google.cloud.datastore_admin_v1.types.Progress): - An estimate of the number of bytes processed. - entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): - Description of which entities are being - exported. - output_url_prefix (str): - Location for the export metadata and data files. This will - be the same value as the - [google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix] - field. The final output location is provided in - [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. - """ - - common: 'CommonMetadata' = proto.Field( - proto.MESSAGE, - number=1, - message='CommonMetadata', - ) - progress_entities: 'Progress' = proto.Field( - proto.MESSAGE, - number=2, - message='Progress', - ) - progress_bytes: 'Progress' = proto.Field( - proto.MESSAGE, - number=3, - message='Progress', - ) - entity_filter: 'EntityFilter' = proto.Field( - proto.MESSAGE, - number=4, - message='EntityFilter', - ) - output_url_prefix: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ImportEntitiesMetadata(proto.Message): - r"""Metadata for ImportEntities operations. - - Attributes: - common (google.cloud.datastore_admin_v1.types.CommonMetadata): - Metadata common to all Datastore Admin - operations. - progress_entities (google.cloud.datastore_admin_v1.types.Progress): - An estimate of the number of entities - processed. - progress_bytes (google.cloud.datastore_admin_v1.types.Progress): - An estimate of the number of bytes processed. - entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): - Description of which entities are being - imported. - input_url (str): - The location of the import metadata file. This will be the - same value as the - [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] - field. - """ - - common: 'CommonMetadata' = proto.Field( - proto.MESSAGE, - number=1, - message='CommonMetadata', - ) - progress_entities: 'Progress' = proto.Field( - proto.MESSAGE, - number=2, - message='Progress', - ) - progress_bytes: 'Progress' = proto.Field( - proto.MESSAGE, - number=3, - message='Progress', - ) - entity_filter: 'EntityFilter' = proto.Field( - proto.MESSAGE, - number=4, - message='EntityFilter', - ) - input_url: str = proto.Field( - proto.STRING, - number=5, - ) - - -class EntityFilter(proto.Message): - r"""Identifies a subset of entities in a project. This is specified as - combinations of kinds and namespaces (either or both of which may be - all, as described in the following examples). Example usage: - - Entire project: kinds=[], namespace_ids=[] - - Kinds Foo and Bar in all namespaces: kinds=['Foo', 'Bar'], - namespace_ids=[] - - Kinds Foo and Bar only in the default namespace: kinds=['Foo', - 'Bar'], namespace_ids=[''] - - Kinds Foo and Bar in both the default and Baz namespaces: - kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] - - The entire Baz namespace: kinds=[], namespace_ids=['Baz'] - - Attributes: - kinds (MutableSequence[str]): - If empty, then this represents all kinds. - namespace_ids (MutableSequence[str]): - An empty list represents all namespaces. This - is the preferred usage for projects that don't - use namespaces. - - An empty string element represents the default - namespace. 
This should be used if the project - has data in non-default namespaces, but doesn't - want to include them. - Each namespace in this list must be unique. - """ - - kinds: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - namespace_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class CreateIndexRequest(proto.Message): - r"""The request for - [google.datastore.admin.v1.DatastoreAdmin.CreateIndex][google.datastore.admin.v1.DatastoreAdmin.CreateIndex]. - - Attributes: - project_id (str): - Project ID against which to make the request. - index (google.cloud.datastore_admin_v1.types.Index): - The index to create. The name and state - fields are output only and will be ignored. - Single property indexes cannot be created or - deleted. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - index: gda_index.Index = proto.Field( - proto.MESSAGE, - number=3, - message=gda_index.Index, - ) - - -class DeleteIndexRequest(proto.Message): - r"""The request for - [google.datastore.admin.v1.DatastoreAdmin.DeleteIndex][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]. - - Attributes: - project_id (str): - Project ID against which to make the request. - index_id (str): - The resource ID of the index to delete. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - index_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetIndexRequest(proto.Message): - r"""The request for - [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. - - Attributes: - project_id (str): - Project ID against which to make the request. - index_id (str): - The resource ID of the index to get. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - index_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListIndexesRequest(proto.Message): - r"""The request for - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - - Attributes: - project_id (str): - Project ID against which to make the request. - filter (str): - - page_size (int): - The maximum number of items to return. If - zero, then all results will be returned. - page_token (str): - The next_page_token value returned from a previous List - request, if any. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListIndexesResponse(proto.Message): - r"""The response for - [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. - - Attributes: - indexes (MutableSequence[google.cloud.datastore_admin_v1.types.Index]): - The indexes. - next_page_token (str): - The standard List next-page token. - """ - - @property - def raw_page(self): - return self - - indexes: MutableSequence[gda_index.Index] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gda_index.Index, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class IndexOperationMetadata(proto.Message): - r"""Metadata for Index operations. - - Attributes: - common (google.cloud.datastore_admin_v1.types.CommonMetadata): - Metadata common to all Datastore Admin - operations. 
- progress_entities (google.cloud.datastore_admin_v1.types.Progress): - An estimate of the number of entities - processed. - index_id (str): - The index resource ID that this operation is - acting on. - """ - - common: 'CommonMetadata' = proto.Field( - proto.MESSAGE, - number=1, - message='CommonMetadata', - ) - progress_entities: 'Progress' = proto.Field( - proto.MESSAGE, - number=2, - message='Progress', - ) - index_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DatastoreFirestoreMigrationMetadata(proto.Message): - r"""Metadata for Datastore to Firestore migration operations. - - The DatastoreFirestoreMigration operation is not started by the - end-user via an explicit "creation" method. This is an intentional - deviation from the LRO design pattern. - - This singleton resource can be accessed at: - "projects/{project_id}/operations/datastore-firestore-migration" - - Attributes: - migration_state (google.cloud.datastore_admin_v1.types.MigrationState): - The current state of migration from Cloud - Datastore to Cloud Firestore in Datastore mode. - migration_step (google.cloud.datastore_admin_v1.types.MigrationStep): - The current step of migration from Cloud - Datastore to Cloud Firestore in Datastore mode. - """ - - migration_state: migration.MigrationState = proto.Field( - proto.ENUM, - number=1, - enum=migration.MigrationState, - ) - migration_step: migration.MigrationStep = proto.Field( - proto.ENUM, - number=2, - enum=migration.MigrationStep, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/index.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/index.py deleted file mode 100644 index c6787577..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/index.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.datastore.admin.v1', - manifest={ - 'Index', - }, -) - - -class Index(proto.Message): - r"""Datastore composite index definition. - - Attributes: - project_id (str): - Output only. Project ID. - index_id (str): - Output only. The resource ID of the index. - kind (str): - Required. The entity kind to which this index - applies. - ancestor (google.cloud.datastore_admin_v1.types.Index.AncestorMode): - Required. The index's ancestor mode. Must not be - ANCESTOR_MODE_UNSPECIFIED. - properties (MutableSequence[google.cloud.datastore_admin_v1.types.Index.IndexedProperty]): - Required. An ordered sequence of property names and their - index attributes. - - Requires: - - - A maximum of 100 properties. - state (google.cloud.datastore_admin_v1.types.Index.State): - Output only. The state of the index. 
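(A minimal sketch of populating the message documented above, assuming the google-cloud-datastore package is installed; the kind and property names are placeholders.)

from google.cloud import datastore_admin_v1

index = datastore_admin_v1.Index(
    kind="Task",
    ancestor=datastore_admin_v1.Index.AncestorMode.NONE,
    properties=[
        datastore_admin_v1.Index.IndexedProperty(
            name="done",
            direction=datastore_admin_v1.Index.Direction.ASCENDING,
        ),
        # A composite index may mix directions across properties.
        datastore_admin_v1.Index.IndexedProperty(
            name="priority",
            direction=datastore_admin_v1.Index.Direction.DESCENDING,
        ),
    ],
)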
- """ - class AncestorMode(proto.Enum): - r"""For an ordered index, specifies whether each of the entity's - ancestors will be included. - - Values: - ANCESTOR_MODE_UNSPECIFIED (0): - The ancestor mode is unspecified. - NONE (1): - Do not include the entity's ancestors in the - index. - ALL_ANCESTORS (2): - Include all the entity's ancestors in the - index. - """ - ANCESTOR_MODE_UNSPECIFIED = 0 - NONE = 1 - ALL_ANCESTORS = 2 - - class Direction(proto.Enum): - r"""The direction determines how a property is indexed. - - Values: - DIRECTION_UNSPECIFIED (0): - The direction is unspecified. - ASCENDING (1): - The property's values are indexed so as to - support sequencing in ascending order and also - query by <, >, <=, >=, and =. - DESCENDING (2): - The property's values are indexed so as to - support sequencing in descending order and also - query by <, >, <=, >=, and =. - """ - DIRECTION_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - class State(proto.Enum): - r"""The possible set of states of an index. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified. - CREATING (1): - The index is being created, and cannot be - used by queries. There is an active long-running - operation for the index. The index is updated - when writing an entity. - Some index data may exist. - READY (2): - The index is ready to be used. - The index is updated when writing an entity. - The index is fully populated from all stored - entities it applies to. - DELETING (3): - The index is being deleted, and cannot be - used by queries. There is an active long-running - operation for the index. The index is not - updated when writing an entity. Some index data - may exist. - ERROR (4): - The index was being created or deleted, but - something went wrong. The index cannot by used - by queries. There is no active long-running - operation for the index, and the most recently - finished long-running operation failed. The - index is not updated when writing an entity. - Some index data may exist. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - DELETING = 3 - ERROR = 4 - - class IndexedProperty(proto.Message): - r"""A property of an index. - - Attributes: - name (str): - Required. The property name to index. - direction (google.cloud.datastore_admin_v1.types.Index.Direction): - Required. The indexed property's direction. Must not be - DIRECTION_UNSPECIFIED. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - direction: 'Index.Direction' = proto.Field( - proto.ENUM, - number=2, - enum='Index.Direction', - ) - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - index_id: str = proto.Field( - proto.STRING, - number=3, - ) - kind: str = proto.Field( - proto.STRING, - number=4, - ) - ancestor: AncestorMode = proto.Field( - proto.ENUM, - number=5, - enum=AncestorMode, - ) - properties: MutableSequence[IndexedProperty] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=IndexedProperty, - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/migration.py b/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/migration.py deleted file mode 100644 index 7d8c992c..00000000 --- a/owl-bot-staging/datastore_admin/v1/google/cloud/datastore_admin_v1/types/migration.py +++ /dev/null @@ -1,200 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.datastore.admin.v1', - manifest={ - 'MigrationState', - 'MigrationStep', - 'MigrationStateEvent', - 'MigrationProgressEvent', - }, -) - - -class MigrationState(proto.Enum): - r"""States for a migration. - - Values: - MIGRATION_STATE_UNSPECIFIED (0): - Unspecified. - RUNNING (1): - The migration is running. - PAUSED (2): - The migration is paused. - COMPLETE (3): - The migration is complete. - """ - MIGRATION_STATE_UNSPECIFIED = 0 - RUNNING = 1 - PAUSED = 2 - COMPLETE = 3 - - -class MigrationStep(proto.Enum): - r"""Steps in a migration. - - Values: - MIGRATION_STEP_UNSPECIFIED (0): - Unspecified. - PREPARE (6): - Pre-migration: the database is prepared for - migration. - START (1): - Start of migration. - APPLY_WRITES_SYNCHRONOUSLY (7): - Writes are applied synchronously to at least - one replica. - COPY_AND_VERIFY (2): - Data is copied to Cloud Firestore and then - verified to match the data in Cloud Datastore. - REDIRECT_EVENTUALLY_CONSISTENT_READS (3): - Eventually-consistent reads are redirected to - Cloud Firestore. - REDIRECT_STRONGLY_CONSISTENT_READS (4): - Strongly-consistent reads are redirected to - Cloud Firestore. - REDIRECT_WRITES (5): - Writes are redirected to Cloud Firestore. - """ - MIGRATION_STEP_UNSPECIFIED = 0 - PREPARE = 6 - START = 1 - APPLY_WRITES_SYNCHRONOUSLY = 7 - COPY_AND_VERIFY = 2 - REDIRECT_EVENTUALLY_CONSISTENT_READS = 3 - REDIRECT_STRONGLY_CONSISTENT_READS = 4 - REDIRECT_WRITES = 5 - - -class MigrationStateEvent(proto.Message): - r"""An event signifying a change in state of a `migration from Cloud - Datastore to Cloud Firestore in Datastore - mode `__. 
- - Attributes: - state (google.cloud.datastore_admin_v1.types.MigrationState): - The new state of the migration. - """ - - state: 'MigrationState' = proto.Field( - proto.ENUM, - number=1, - enum='MigrationState', - ) - - -class MigrationProgressEvent(proto.Message): - r"""An event signifying the start of a new step in a `migration from - Cloud Datastore to Cloud Firestore in Datastore - mode <https://cloud.google.com/datastore/docs/upgrade-to-firestore>`__. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - step (google.cloud.datastore_admin_v1.types.MigrationStep): - The step that is starting. - - An event with step set to ``START`` indicates that the - migration has been reverted back to the initial - pre-migration state. - prepare_step_details (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.PrepareStepDetails): - Details for the ``PREPARE`` step. - - This field is a member of `oneof`_ ``step_details``. - redirect_writes_step_details (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.RedirectWritesStepDetails): - Details for the ``REDIRECT_WRITES`` step. - - This field is a member of `oneof`_ ``step_details``. - """ - class ConcurrencyMode(proto.Enum): - r"""Concurrency modes for transactions in Cloud Firestore. - - Values: - CONCURRENCY_MODE_UNSPECIFIED (0): - Unspecified. - PESSIMISTIC (1): - Pessimistic concurrency. - OPTIMISTIC (2): - Optimistic concurrency. - OPTIMISTIC_WITH_ENTITY_GROUPS (3): - Optimistic concurrency with entity groups. - """ - CONCURRENCY_MODE_UNSPECIFIED = 0 - PESSIMISTIC = 1 - OPTIMISTIC = 2 - OPTIMISTIC_WITH_ENTITY_GROUPS = 3 - - class PrepareStepDetails(proto.Message): - r"""Details for the ``PREPARE`` step. - - Attributes: - concurrency_mode (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.ConcurrencyMode): - The concurrency mode this database will use when it reaches - the ``REDIRECT_WRITES`` step. - """ - - concurrency_mode: 'MigrationProgressEvent.ConcurrencyMode' = proto.Field( - proto.ENUM, - number=1, - enum='MigrationProgressEvent.ConcurrencyMode', - ) - - class RedirectWritesStepDetails(proto.Message): - r"""Details for the ``REDIRECT_WRITES`` step. - - Attributes: - concurrency_mode (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.ConcurrencyMode): - The concurrency mode for this database.
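(A short sketch of the oneof behaviour documented above: assigning one step_details member clears the other. The values are illustrative only.)

from google.cloud import datastore_admin_v1

event = datastore_admin_v1.MigrationProgressEvent(
    step=datastore_admin_v1.MigrationStep.PREPARE,
    prepare_step_details=datastore_admin_v1.MigrationProgressEvent.PrepareStepDetails(),
)
# Setting the other oneof member replaces prepare_step_details.
event.redirect_writes_step_details = (
    datastore_admin_v1.MigrationProgressEvent.RedirectWritesStepDetails()
)
pb = datastore_admin_v1.MigrationProgressEvent.pb(event)
assert pb.WhichOneof("step_details") == "redirect_writes_step_details"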
- """ - - concurrency_mode: 'MigrationProgressEvent.ConcurrencyMode' = proto.Field( - proto.ENUM, - number=1, - enum='MigrationProgressEvent.ConcurrencyMode', - ) - - step: 'MigrationStep' = proto.Field( - proto.ENUM, - number=1, - enum='MigrationStep', - ) - prepare_step_details: PrepareStepDetails = proto.Field( - proto.MESSAGE, - number=2, - oneof='step_details', - message=PrepareStepDetails, - ) - redirect_writes_step_details: RedirectWritesStepDetails = proto.Field( - proto.MESSAGE, - number=3, - oneof='step_details', - message=RedirectWritesStepDetails, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/datastore_admin/v1/mypy.ini b/owl-bot-staging/datastore_admin/v1/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/datastore_admin/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/datastore_admin/v1/noxfile.py b/owl-bot-staging/datastore_admin/v1/noxfile.py deleted file mode 100644 index 75fc5c52..00000000 --- a/owl-bot-staging/datastore_admin/v1/noxfile.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12" -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-datastore' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/datastore_admin_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - -@nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): - """Run the unit test suite against pre-release versions of dependencies.""" - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # 
dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/datastore_admin_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
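(For reference, a noxfile like this one is driven from the command line; assuming nox is installed, individual sessions are selected by name, and sessions parametrized over ALL_PYTHON carry the interpreter version in the session name.)

# python -m nox -s lint
# python -m nox -s unit-3.12
# python -m nox -s docs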
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_create_index_async.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_create_index_async.py deleted file mode 100644 index 13f9e3eb..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_create_index_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_CreateIndex_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -async def sample_create_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.CreateIndexRequest( - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_CreateIndex_async] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_create_index_sync.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_create_index_sync.py deleted file mode 100644 index 1885023c..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_create_index_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_CreateIndex_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -def sample_create_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.CreateIndexRequest( - ) - - # Make the request - operation = client.create_index(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_CreateIndex_sync] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_delete_index_async.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_delete_index_async.py deleted file mode 100644 index e6e13db2..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_delete_index_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_DeleteIndex_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -async def sample_delete_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.DeleteIndexRequest( - ) - - # Make the request - operation = client.delete_index(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_DeleteIndex_async] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_delete_index_sync.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_delete_index_sync.py deleted file mode 100644 index d256224f..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_delete_index_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_DeleteIndex_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -def sample_delete_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.DeleteIndexRequest( - ) - - # Make the request - operation = client.delete_index(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_DeleteIndex_sync] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_export_entities_async.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_export_entities_async.py deleted file mode 100644 index 5c4ae127..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_export_entities_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_ExportEntities_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
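
The DeleteIndex samples above likewise send an empty request. Per the METHOD_TO_PARAMS table in the fixup script later in this diff, DeleteIndexRequest takes project_id and index_id; a minimal populated sketch, with hypothetical values:

    from google.cloud import datastore_admin_v1

    def sample_delete_index_populated():
        client = datastore_admin_v1.DatastoreAdminClient()
        request = datastore_admin_v1.DeleteIndexRequest(
            project_id="my-project",
            index_id="CICAgJiUpoMK",  # hypothetical id, e.g. one returned by ListIndexes
        )
        operation = client.delete_index(request=request)
        print(operation.result())
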
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -async def sample_export_entities(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ExportEntitiesRequest( - project_id="project_id_value", - output_url_prefix="output_url_prefix_value", - ) - - # Make the request - operation = client.export_entities(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_ExportEntities_async] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_export_entities_sync.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_export_entities_sync.py deleted file mode 100644 index abe4dcd0..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_export_entities_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_ExportEntities_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -def sample_export_entities(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ExportEntitiesRequest( - project_id="project_id_value", - output_url_prefix="output_url_prefix_value", - ) - - # Make the request - operation = client.export_entities(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_ExportEntities_sync] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_get_index_async.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_get_index_async.py deleted file mode 100644 index 66de6f27..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_get_index_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_GetIndex_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
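
Beyond the two required fields shown above, ExportEntities also accepts labels and an entity_filter (both appear in the snippet metadata later in this diff). A hedged sketch narrowing the export; the bucket, kind, and label values are hypothetical, and output_url_prefix must be a Cloud Storage URL:

    from google.cloud import datastore_admin_v1

    def sample_export_entities_filtered():
        client = datastore_admin_v1.DatastoreAdminClient()
        request = datastore_admin_v1.ExportEntitiesRequest(
            project_id="my-project",
            output_url_prefix="gs://my-bucket/datastore-exports",
            entity_filter=datastore_admin_v1.EntityFilter(
                kinds=["Task"],
                namespace_ids=[""],  # the empty string selects the default namespace
            ),
            labels={"reason": "nightly-backup"},
        )
        operation = client.export_entities(request=request)
        response = operation.result()
        # output_url reports where the export metadata was written.
        print(response.output_url)
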
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -async def sample_get_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.GetIndexRequest( - ) - - # Make the request - response = await client.get_index(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_GetIndex_async] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_get_index_sync.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_get_index_sync.py deleted file mode 100644 index 17e5f6f4..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_get_index_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIndex -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_GetIndex_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -def sample_get_index(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.GetIndexRequest( - ) - - # Make the request - response = client.get_index(request=request) - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_GetIndex_sync] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_import_entities_async.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_import_entities_async.py deleted file mode 100644 index 70fccd2d..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_import_entities_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
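
Unlike the methods above, GetIndex returns an Index directly instead of a long-running operation. Its two request fields, project_id and index_id, are confirmed by the fixup script later in this diff; the values below are hypothetical:

    from google.cloud import datastore_admin_v1

    def sample_get_index_populated():
        client = datastore_admin_v1.DatastoreAdminClient()
        request = datastore_admin_v1.GetIndexRequest(
            project_id="my-project",
            index_id="CICAgJiUpoMK",  # hypothetical index id
        )
        index = client.get_index(request=request)
        print(index.state)
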
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_ImportEntities_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -async def sample_import_entities(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ImportEntitiesRequest( - project_id="project_id_value", - input_url="input_url_value", - ) - - # Make the request - operation = client.import_entities(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_ImportEntities_async] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_import_entities_sync.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_import_entities_sync.py deleted file mode 100644 index a2f01e7f..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_import_entities_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportEntities -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_ImportEntities_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -def sample_import_entities(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ImportEntitiesRequest( - project_id="project_id_value", - input_url="input_url_value", - ) - - # Make the request - operation = client.import_entities(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_ImportEntities_sync] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_list_indexes_async.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_list_indexes_async.py deleted file mode 100644 index c2272fa4..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_list_indexes_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListIndexes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_ListIndexes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
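
For ImportEntities, input_url points at the overall-export-metadata file written by a previous export, not at the bucket root. A sketch with a hypothetical object path:

    from google.cloud import datastore_admin_v1

    def sample_import_entities_from_export():
        client = datastore_admin_v1.DatastoreAdminClient()
        request = datastore_admin_v1.ImportEntitiesRequest(
            project_id="my-project",
            input_url=(
                "gs://my-bucket/datastore-exports/"
                "2024-04-22.overall_export_metadata"  # hypothetical export file
            ),
        )
        operation = client.import_entities(request=request)
        operation.result()  # the import operation's response is Empty on success
        print("import complete")
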
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -async def sample_list_indexes(): - # Create a client - client = datastore_admin_v1.DatastoreAdminAsyncClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ListIndexesRequest( - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_ListIndexes_async] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_list_indexes_sync.py b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_list_indexes_sync.py deleted file mode 100644 index 689a14ff..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/datastore_v1_generated_datastore_admin_list_indexes_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListIndexes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datastore - - -# [START datastore_v1_generated_DatastoreAdmin_ListIndexes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
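
The ListIndexes pagers above fetch subsequent pages lazily as iteration proceeds, so neither sample handles page tokens by hand. Per the fixup script's keyword table, the request also supports filter, page_size, and page_token; a sync sketch with a hypothetical project id (the async pager mirrors it with async for, as shown above):

    from google.cloud import datastore_admin_v1

    def sample_list_indexes_paged():
        client = datastore_admin_v1.DatastoreAdminClient()
        request = datastore_admin_v1.ListIndexesRequest(
            project_id="my-project",
            page_size=50,
        )
        for index in client.list_indexes(request=request):
            print(index.index_id, index.kind, index.state)
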
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datastore_admin_v1 - - -def sample_list_indexes(): - # Create a client - client = datastore_admin_v1.DatastoreAdminClient() - - # Initialize request argument(s) - request = datastore_admin_v1.ListIndexesRequest( - ) - - # Make the request - page_result = client.list_indexes(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datastore_v1_generated_DatastoreAdmin_ListIndexes_sync] diff --git a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/snippet_metadata_google.datastore.admin.v1.json b/owl-bot-staging/datastore_admin/v1/samples/generated_samples/snippet_metadata_google.datastore.admin.v1.json deleted file mode 100644 index eef34251..00000000 --- a/owl-bot-staging/datastore_admin/v1/samples/generated_samples/snippet_metadata_google.datastore.admin.v1.json +++ /dev/null @@ -1,997 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.datastore.admin.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-datastore", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient", - "shortName": "DatastoreAdminAsyncClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient.create_index", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.CreateIndex", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "CreateIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.CreateIndexRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_index" - }, - "description": "Sample for CreateIndex", - "file": "datastore_v1_generated_datastore_admin_create_index_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_CreateIndex_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_create_index_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient", - "shortName": "DatastoreAdminClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient.create_index", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.CreateIndex", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "CreateIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.CreateIndexRequest" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_index" - }, - "description": "Sample for CreateIndex", - "file": "datastore_v1_generated_datastore_admin_create_index_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_CreateIndex_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_create_index_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient", - "shortName": "DatastoreAdminAsyncClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient.delete_index", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.DeleteIndex", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "DeleteIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.DeleteIndexRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_index" - }, - "description": "Sample for DeleteIndex", - "file": "datastore_v1_generated_datastore_admin_delete_index_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_DeleteIndex_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_delete_index_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient", - "shortName": "DatastoreAdminClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient.delete_index", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.DeleteIndex", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "DeleteIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.DeleteIndexRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_index" - }, - "description": "Sample for DeleteIndex", - "file": 
"datastore_v1_generated_datastore_admin_delete_index_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_DeleteIndex_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_delete_index_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient", - "shortName": "DatastoreAdminAsyncClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient.export_entities", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.ExportEntities", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "ExportEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.ExportEntitiesRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "labels", - "type": "MutableMapping[str, str]" - }, - { - "name": "entity_filter", - "type": "google.cloud.datastore_admin_v1.types.EntityFilter" - }, - { - "name": "output_url_prefix", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_entities" - }, - "description": "Sample for ExportEntities", - "file": "datastore_v1_generated_datastore_admin_export_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_ExportEntities_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_export_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient", - "shortName": "DatastoreAdminClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient.export_entities", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.ExportEntities", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "ExportEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.ExportEntitiesRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "labels", - "type": "MutableMapping[str, str]" - }, - { - "name": "entity_filter", - "type": "google.cloud.datastore_admin_v1.types.EntityFilter" - }, - { - "name": "output_url_prefix", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_entities" - }, - "description": "Sample for ExportEntities", - "file": "datastore_v1_generated_datastore_admin_export_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_ExportEntities_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_export_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient", - "shortName": "DatastoreAdminAsyncClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient.get_index", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.GetIndex", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "GetIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.GetIndexRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_admin_v1.types.Index", - "shortName": "get_index" - }, - "description": "Sample for GetIndex", - "file": "datastore_v1_generated_datastore_admin_get_index_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_GetIndex_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_get_index_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient", - "shortName": "DatastoreAdminClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient.get_index", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.GetIndex", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "GetIndex" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.GetIndexRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_admin_v1.types.Index", - "shortName": "get_index" - }, - "description": "Sample for GetIndex", - "file": 
"datastore_v1_generated_datastore_admin_get_index_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_GetIndex_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_get_index_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient", - "shortName": "DatastoreAdminAsyncClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient.import_entities", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.ImportEntities", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "ImportEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.ImportEntitiesRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "labels", - "type": "MutableMapping[str, str]" - }, - { - "name": "input_url", - "type": "str" - }, - { - "name": "entity_filter", - "type": "google.cloud.datastore_admin_v1.types.EntityFilter" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_entities" - }, - "description": "Sample for ImportEntities", - "file": "datastore_v1_generated_datastore_admin_import_entities_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_ImportEntities_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_import_entities_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient", - "shortName": "DatastoreAdminClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient.import_entities", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.ImportEntities", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "ImportEntities" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.ImportEntitiesRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "labels", - "type": "MutableMapping[str, str]" - }, - { - "name": "input_url", - "type": "str" - }, - { - "name": "entity_filter", - "type": "google.cloud.datastore_admin_v1.types.EntityFilter" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_entities" - }, - "description": "Sample for ImportEntities", - "file": "datastore_v1_generated_datastore_admin_import_entities_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_ImportEntities_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_import_entities_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient", - "shortName": "DatastoreAdminAsyncClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminAsyncClient.list_indexes", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.ListIndexes", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "ListIndexes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.ListIndexesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesAsyncPager", - "shortName": "list_indexes" - }, - "description": "Sample for ListIndexes", - "file": "datastore_v1_generated_datastore_admin_list_indexes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_ListIndexes_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_list_indexes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient", - "shortName": "DatastoreAdminClient" - }, - "fullName": "google.cloud.datastore_admin_v1.DatastoreAdminClient.list_indexes", - "method": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin.ListIndexes", - "service": { - "fullName": "google.datastore.admin.v1.DatastoreAdmin", - "shortName": "DatastoreAdmin" - }, - "shortName": "ListIndexes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datastore_admin_v1.types.ListIndexesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datastore_admin_v1.services.datastore_admin.pagers.ListIndexesPager", - "shortName": "list_indexes" 
- }, - "description": "Sample for ListIndexes", - "file": "datastore_v1_generated_datastore_admin_list_indexes_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datastore_v1_generated_DatastoreAdmin_ListIndexes_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datastore_v1_generated_datastore_admin_list_indexes_sync.py" - } - ] -} diff --git a/owl-bot-staging/datastore_admin/v1/scripts/fixup_datastore_admin_v1_keywords.py b/owl-bot-staging/datastore_admin/v1/scripts/fixup_datastore_admin_v1_keywords.py deleted file mode 100644 index 2f999e1e..00000000 --- a/owl-bot-staging/datastore_admin/v1/scripts/fixup_datastore_admin_v1_keywords.py +++ /dev/null @@ -1,181 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class datastore_adminCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_index': ('project_id', 'index', ), - 'delete_index': ('project_id', 'index_id', ), - 'export_entities': ('project_id', 'output_url_prefix', 'labels', 'entity_filter', ), - 'get_index': ('project_id', 'index_id', ), - 'import_entities': ('project_id', 'input_url', 'labels', 'entity_filter', ), - 'list_indexes': ('project_id', 'filter', 'page_size', 'page_token', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=datastore_adminCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the datastore_admin client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
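
To make the rewrite concrete: using the METHOD_TO_PARAMS table above, the transformer collapses positional arguments into a single request dict and leaves retry, timeout, and metadata as keyword arguments. A before/after fragment with hypothetical values:

    # Before fix-up: positional arguments in METHOD_TO_PARAMS order.
    client.export_entities("my-project", "gs://my-bucket/exports")

    # After fix-up: a single keyword-only request dict.
    client.export_entities(request={
        'project_id': "my-project",
        'output_url_prefix': "gs://my-bucket/exports",
    })
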
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/datastore_admin/v1/setup.py b/owl-bot-staging/datastore_admin/v1/setup.py deleted file mode 100644 index d97ecce4..00000000 --- a/owl-bot-staging/datastore_admin/v1/setup.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-datastore' - - -description = "Google Cloud Datastore API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/datastore_admin/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastore" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - 
release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.10.txt b/owl-bot-staging/datastore_admin/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.11.txt b/owl-bot-staging/datastore_admin/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.12.txt b/owl-bot-staging/datastore_admin/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.7.txt b/owl-bot-staging/datastore_admin/v1/testing/constraints-3.7.txt deleted file mode 100644 index b8a550c7..00000000 --- a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.19.5 diff --git a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.8.txt b/owl-bot-staging/datastore_admin/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.9.txt b/owl-bot-staging/datastore_admin/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed..00000000 --- a/owl-bot-staging/datastore_admin/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/datastore_admin/v1/tests/__init__.py b/owl-bot-staging/datastore_admin/v1/tests/__init__.py deleted file mode 100644 index 7b3de311..00000000 --- a/owl-bot-staging/datastore_admin/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore_admin/v1/tests/unit/__init__.py b/owl-bot-staging/datastore_admin/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de311..00000000 --- a/owl-bot-staging/datastore_admin/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de311..00000000 --- a/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/datastore_admin_v1/__init__.py b/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/datastore_admin_v1/__init__.py deleted file mode 100644 index 7b3de311..00000000 --- a/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/datastore_admin_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py deleted file mode 100644 index 8c8cf15a..00000000 --- a/owl-bot-staging/datastore_admin/v1/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ /dev/null @@ -1,4867 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.datastore_admin_v1.services.datastore_admin import DatastoreAdminAsyncClient -from google.cloud.datastore_admin_v1.services.datastore_admin import DatastoreAdminClient -from google.cloud.datastore_admin_v1.services.datastore_admin import pagers -from google.cloud.datastore_admin_v1.services.datastore_admin import transports -from google.cloud.datastore_admin_v1.types import datastore_admin -from google.cloud.datastore_admin_v1.types import index -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DatastoreAdminClient._get_default_mtls_endpoint(None) is None - assert DatastoreAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DatastoreAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DatastoreAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DatastoreAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DatastoreAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DatastoreAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DatastoreAdminClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DatastoreAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DatastoreAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DatastoreAdminClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DatastoreAdminClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DatastoreAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DatastoreAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DatastoreAdminClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DatastoreAdminClient._get_client_cert_source(None, False) is None - assert DatastoreAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DatastoreAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DatastoreAdminClient._get_client_cert_source(None, True) is mock_default_cert_source - assert 
DatastoreAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DatastoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAdminClient)) -@mock.patch.object(DatastoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAdminAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE - default_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DatastoreAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DatastoreAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT - assert DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "always") == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT - assert DatastoreAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DatastoreAdminClient.DEFAULT_MTLS_ENDPOINT - assert DatastoreAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DatastoreAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DatastoreAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DatastoreAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DatastoreAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DatastoreAdminClient._get_universe_domain(None, None) == DatastoreAdminClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DatastoreAdminClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), - (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. 
a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
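# A minimal standalone sketch of the version-gating pattern used in the two
# TODO-guarded blocks above: parse `<pkg>.__version__` into integer parts and
# only exercise newer behaviour when the installed version is recent enough.
# The thresholds below are illustrative, not taken from the patch.
def version_at_least(version_string, major, minor):
    """Return True if an 'X.Y.Z' version string is >= major.minor."""
    parts = [int(part) for part in version_string.split(".")[:2]]
    return tuple(parts) >= (major, minor)

assert version_at_least("2.23.0", 2, 23)
assert not version_at_least("2.14.5", 2, 15)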
- - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DatastoreAdminClient, "grpc"), - (DatastoreAdminAsyncClient, "grpc_asyncio"), - (DatastoreAdminClient, "rest"), -]) -def test_datastore_admin_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datastore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://datastore.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DatastoreAdminGrpcTransport, "grpc"), - (transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DatastoreAdminRestTransport, "rest"), -]) -def test_datastore_admin_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DatastoreAdminClient, "grpc"), - (DatastoreAdminAsyncClient, "grpc_asyncio"), - (DatastoreAdminClient, "rest"), -]) -def test_datastore_admin_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datastore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://datastore.googleapis.com' - ) - - -def test_datastore_admin_client_get_transport_class(): - transport = DatastoreAdminClient.get_transport_class() - available_transports = [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminRestTransport, - ] - assert transport in available_transports - - transport = DatastoreAdminClient.get_transport_class("grpc") - assert transport == transports.DatastoreAdminGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), - (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), - 
(DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), -]) -@mock.patch.object(DatastoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAdminClient)) -@mock.patch.object(DatastoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAdminAsyncClient)) -def test_datastore_admin_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DatastoreAdminClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DatastoreAdminClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
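# Before the remaining cases, a condensed sketch of the endpoint-selection
# rule the assertions above pin down, assuming the simplified inputs below
# (the real client also weighs the universe domain, and raises
# MutualTLSChannelError rather than ValueError for an unsupported value):
def select_endpoint(use_mtls_env, default_endpoint, mtls_endpoint, have_cert):
    if use_mtls_env == "never":
        return default_endpoint
    if use_mtls_env == "always":
        return mtls_endpoint
    if use_mtls_env == "auto":
        return mtls_endpoint if have_cert else default_endpoint
    raise ValueError("GOOGLE_API_USE_MTLS_ENDPOINT must be never, auto or always")

assert select_endpoint("auto", "datastore.googleapis.com",
                       "datastore.mtls.googleapis.com", False) == "datastore.googleapis.com"
# The unsupported-value case flagged in the comment above is exercised next.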
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc", "true"), - (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc", "false"), - (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest", "true"), - (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest", "false"), -]) -@mock.patch.object(DatastoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAdminClient)) -@mock.patch.object(DatastoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAdminAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_datastore_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
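# These environment matrices lean on mock.patch.dict, which restores
# os.environ when the context exits. A self-contained illustration with an
# invented variable name (EXAMPLE_FLAG is not part of the patch):
import os
from unittest import mock

before = os.environ.get("EXAMPLE_FLAG")
with mock.patch.dict(os.environ, {"EXAMPLE_FLAG": "true"}):
    assert os.environ["EXAMPLE_FLAG"] == "true"
assert os.environ.get("EXAMPLE_FLAG") == before  # restored on exit
# The client_cert_source case described above follows.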
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DatastoreAdminClient, DatastoreAdminAsyncClient -]) -@mock.patch.object(DatastoreAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreAdminClient)) -@mock.patch.object(DatastoreAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreAdminAsyncClient)) -def test_datastore_admin_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
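# Note the shape contract at work here: client_cert_source_callback (defined
# near the top of this module) returns a (cert_bytes, key_bytes) pair, which
# is the shape the mTLS plumbing expects from any client certificate source.
cert, key = (b"cert bytes", b"key bytes")
assert isinstance(cert, bytes) and isinstance(key, bytes)
# The GOOGLE_API_USE_CLIENT_CERTIFICATE="true" case noted above follows.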
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DatastoreAdminClient, DatastoreAdminAsyncClient -]) -@mock.patch.object(DatastoreAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAdminClient)) -@mock.patch.object(DatastoreAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatastoreAdminAsyncClient)) -def test_datastore_admin_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DatastoreAdminClient._DEFAULT_UNIVERSE - default_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DatastoreAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
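# A rough sketch of the decision matrix these cases pin down, with a
# simplified signature (the real classmethod also honours an explicit
# api_endpoint and a caller-provided client_cert_source):
def mtls_endpoint_and_cert(use_client_cert, use_mtls, default_ep, mtls_ep, found_cert):
    cert = found_cert if use_client_cert == "true" else None
    if use_mtls == "always" or (use_mtls == "auto" and cert is not None):
        return mtls_ep, cert
    return default_ep, cert

assert mtls_endpoint_and_cert("false", "never", "d", "m", None) == ("d", None)
assert mtls_endpoint_and_cert("true", "auto", "d", "m", "CERT") == ("m", "CERT")
# The unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE case noted above follows.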
- options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), - (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest"), -]) -def test_datastore_admin_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc", grpc_helpers), - (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DatastoreAdminClient, transports.DatastoreAdminRestTransport, "rest", None), -]) -def test_datastore_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_datastore_admin_client_client_options_from_dict(): - with mock.patch('google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DatastoreAdminClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc", grpc_helpers), - (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_datastore_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
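# The client-options tests above all follow one pattern: patch the transport
# constructor, build a client, then assert on the keyword arguments the
# constructor received. The same pattern in miniature, with a stand-in class
# (Transport here is illustrative, not the real transport):
from unittest import mock

class Transport:
    def __init__(self, host=None, credentials=None):
        self.host = host

with mock.patch.object(Transport, "__init__", return_value=None) as patched:
    Transport(host="squid.clam.whelk", credentials=None)
    patched.assert_called_once_with(host="squid.clam.whelk", credentials=None)
# The credentials-file check described above follows.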
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datastore.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - scopes=None, - default_host="datastore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - datastore_admin.ExportEntitiesRequest, - dict, -]) -def test_export_entities(request_type, transport: str = 'grpc'): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.export_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datastore_admin.ExportEntitiesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_export_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - client.export_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() - - -def test_export_entities_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datastore_admin.ExportEntitiesRequest( - project_id='project_id_value', - output_url_prefix='output_url_prefix_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
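# The stub mocking below patches '__call__' on type(client.transport.<rpc>):
# each transport RPC is exposed as a callable object, so replacing __call__
# on its class intercepts the invocation. A miniature version with a
# stand-in class (UnaryCall is illustrative):
from unittest import mock

class UnaryCall:
    def __call__(self, request):
        raise RuntimeError("would hit the network")

call_obj = UnaryCall()
with mock.patch.object(UnaryCall, "__call__") as fake:
    fake.return_value = {"name": "operations/spam"}
    response = call_obj(request={"project_id": "p"})
assert response == {"name": "operations/spam"}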
- with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - client.export_entities(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest( - project_id='project_id_value', - output_url_prefix='output_url_prefix_value', - ) - -@pytest.mark.asyncio -async def test_export_entities_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ExportEntitiesRequest() - -@pytest.mark.asyncio -async def test_export_entities_async(transport: str = 'grpc_asyncio', request_type=datastore_admin.ExportEntitiesRequest): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datastore_admin.ExportEntitiesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_entities_async_from_dict(): - await test_export_entities_async(request_type=dict) - - -def test_export_entities_field_headers(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore_admin.ExportEntitiesRequest() - - request.project_id = 'project_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
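# Sketch of how a routing header like the one asserted below is built:
# URL-quoted key=value pairs carried in the 'x-goog-request-params'
# metadata entry (simplified; a helper for this lives in
# google.api_core.gapic_v1.routing_header):
from urllib.parse import quote

def routing_metadata(**params):
    value = "&".join(f"{k}={quote(str(v))}" for k, v in params.items())
    return ("x-goog-request-params", value)

assert routing_metadata(project_id="project_id_value") == (
    "x-goog-request-params", "project_id=project_id_value")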
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_entities_field_headers_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore_admin.ExportEntitiesRequest() - - request.project_id = 'project_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.export_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value', - ) in kw['metadata'] - - -def test_export_entities_flattened(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.export_entities( - project_id='project_id_value', - labels={'key_value': 'value_value'}, - entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']), - output_url_prefix='output_url_prefix_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].labels - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - arg = args[0].entity_filter - mock_val = datastore_admin.EntityFilter(kinds=['kinds_value']) - assert arg == mock_val - arg = args[0].output_url_prefix - mock_val = 'output_url_prefix_value' - assert arg == mock_val - - -def test_export_entities_flattened_error(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_entities( - datastore_admin.ExportEntitiesRequest(), - project_id='project_id_value', - labels={'key_value': 'value_value'}, - entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']), - output_url_prefix='output_url_prefix_value', - ) - -@pytest.mark.asyncio -async def test_export_entities_flattened_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_entities), - '__call__') as call: - # Designate an appropriate return value for the call. 
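# A note on the `_, args, _ = call.mock_calls[0]` idiom used throughout:
# each mock_calls entry is a (name, args, kwargs) triple, so unpacking it
# gives direct access to the positional request and the metadata kwargs.
from unittest import mock

m = mock.Mock()
m(1, 2, key="value")
name, args, kwargs = m.mock_calls[0]
assert args == (1, 2) and kwargs == {"key": "value"}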
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.export_entities( - project_id='project_id_value', - labels={'key_value': 'value_value'}, - entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']), - output_url_prefix='output_url_prefix_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].labels - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - arg = args[0].entity_filter - mock_val = datastore_admin.EntityFilter(kinds=['kinds_value']) - assert arg == mock_val - arg = args[0].output_url_prefix - mock_val = 'output_url_prefix_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_export_entities_flattened_error_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.export_entities( - datastore_admin.ExportEntitiesRequest(), - project_id='project_id_value', - labels={'key_value': 'value_value'}, - entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']), - output_url_prefix='output_url_prefix_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datastore_admin.ImportEntitiesRequest, - dict, -]) -def test_import_entities(request_type, transport: str = 'grpc'): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.import_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datastore_admin.ImportEntitiesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_import_entities_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
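# The flattened-error tests above pin down a simple client rule: a method
# takes either a request object or individual fields, never both. A sketch
# of that guard with illustrative names:
def export_entities(request=None, project_id=None):
    if request is not None and project_id is not None:
        raise ValueError("If the `request` argument is set, individual "
                         "field arguments must not be set.")

export_entities(project_id="p")    # fine
export_entities(request=object())  # fine
try:
    export_entities(request=object(), project_id="p")
except ValueError:
    pass  # both together is rejected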
- with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - client.import_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() - - -def test_import_entities_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datastore_admin.ImportEntitiesRequest( - project_id='project_id_value', - input_url='input_url_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - client.import_entities(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest( - project_id='project_id_value', - input_url='input_url_value', - ) - -@pytest.mark.asyncio -async def test_import_entities_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.import_entities() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datastore_admin.ImportEntitiesRequest() - -@pytest.mark.asyncio -async def test_import_entities_async(transport: str = 'grpc_asyncio', request_type=datastore_admin.ImportEntitiesRequest): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.import_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datastore_admin.ImportEntitiesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
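# Why these tests assert isinstance(response, future.Future): export/import
# RPCs return a google.longrunning Operation proto, which the client wraps
# in a future-like object from google.api_core.operation. A rough stdlib
# analogue of wrapping a handle in a future (illustrative only):
from concurrent.futures import Future

def wrap_operation(operation_name):
    fut = Future()
    fut.operation_name = operation_name  # illustrative attribute
    return fut

fut = wrap_operation("operations/spam")
fut.set_result({"done": True})
assert fut.result() == {"done": True}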
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_import_entities_async_from_dict(): - await test_import_entities_async(request_type=dict) - - -def test_import_entities_field_headers(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore_admin.ImportEntitiesRequest() - - request.project_id = 'project_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.import_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_import_entities_field_headers_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datastore_admin.ImportEntitiesRequest() - - request.project_id = 'project_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.import_entities(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value', - ) in kw['metadata'] - - -def test_import_entities_flattened(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.import_entities( - project_id='project_id_value', - labels={'key_value': 'value_value'}, - input_url='input_url_value', - entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].labels - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - arg = args[0].input_url - mock_val = 'input_url_value' - assert arg == mock_val - arg = args[0].entity_filter - mock_val = datastore_admin.EntityFilter(kinds=['kinds_value']) - assert arg == mock_val - - -def test_import_entities_flattened_error(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.import_entities( - datastore_admin.ImportEntitiesRequest(), - project_id='project_id_value', - labels={'key_value': 'value_value'}, - input_url='input_url_value', - entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']), - ) - -@pytest.mark.asyncio -async def test_import_entities_flattened_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entities), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.import_entities( - project_id='project_id_value', - labels={'key_value': 'value_value'}, - input_url='input_url_value', - entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].labels - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - arg = args[0].input_url - mock_val = 'input_url_value' - assert arg == mock_val - arg = args[0].entity_filter - mock_val = datastore_admin.EntityFilter(kinds=['kinds_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_import_entities_flattened_error_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.import_entities( - datastore_admin.ImportEntitiesRequest(), - project_id='project_id_value', - labels={'key_value': 'value_value'}, - input_url='input_url_value', - entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datastore_admin.CreateIndexRequest, - dict, -]) -def test_create_index(request_type, transport: str = 'grpc'): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.create_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datastore_admin.CreateIndexRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_index_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_index),
-            '__call__') as call:
-        client.create_index()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.CreateIndexRequest()
-
-
-def test_create_index_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datastore_admin.CreateIndexRequest(
-        project_id='project_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_index),
-            '__call__') as call:
-        client.create_index(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.CreateIndexRequest(
-            project_id='project_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_index_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.create_index()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.CreateIndexRequest()
-
-@pytest.mark.asyncio
-async def test_create_index_async(transport: str = 'grpc_asyncio', request_type=datastore_admin.CreateIndexRequest):
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.create_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datastore_admin.CreateIndexRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_create_index_async_from_dict():
-    await test_create_index_async(request_type=dict)
-
-
-def test_create_index_field_headers():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore_admin.CreateIndexRequest()
-
-    request.project_id = 'project_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_index),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.create_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_index_field_headers_async():
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore_admin.CreateIndexRequest()
-
-    request.project_id = 'project_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_index),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.create_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore_admin.DeleteIndexRequest,
-    dict,
-])
-def test_delete_index(request_type, transport: str = 'grpc'):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datastore_admin.DeleteIndexRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_delete_index_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_index),
-            '__call__') as call:
-        client.delete_index()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.DeleteIndexRequest()
-
-
-def test_delete_index_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datastore_admin.DeleteIndexRequest(
-        project_id='project_id_value',
-        index_id='index_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_index),
-            '__call__') as call:
-        client.delete_index(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.DeleteIndexRequest(
-            project_id='project_id_value',
-            index_id='index_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_index_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.delete_index()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.DeleteIndexRequest()
-
-@pytest.mark.asyncio
-async def test_delete_index_async(transport: str = 'grpc_asyncio', request_type=datastore_admin.DeleteIndexRequest):
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.delete_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datastore_admin.DeleteIndexRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_delete_index_async_from_dict():
-    await test_delete_index_async(request_type=dict)
-
-
-def test_delete_index_field_headers():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore_admin.DeleteIndexRequest()
-
-    request.project_id = 'project_id_value'
-    request.index_id = 'index_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_index),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.delete_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&index_id=index_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_index_field_headers_async():
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore_admin.DeleteIndexRequest()
-
-    request.project_id = 'project_id_value'
-    request.index_id = 'index_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_index),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.delete_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&index_id=index_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore_admin.GetIndexRequest,
-    dict,
-])
-def test_get_index(request_type, transport: str = 'grpc'):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = index.Index(
-            project_id='project_id_value',
-            index_id='index_id_value',
-            kind='kind_value',
-            ancestor=index.Index.AncestorMode.NONE,
-            state=index.Index.State.CREATING,
-        )
-        response = client.get_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datastore_admin.GetIndexRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, index.Index)
-    assert response.project_id == 'project_id_value'
-    assert response.index_id == 'index_id_value'
-    assert response.kind == 'kind_value'
-    assert response.ancestor == index.Index.AncestorMode.NONE
-    assert response.state == index.Index.State.CREATING
-
-
-def test_get_index_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_index),
-            '__call__') as call:
-        client.get_index()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.GetIndexRequest()
-
-
-def test_get_index_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datastore_admin.GetIndexRequest(
-        project_id='project_id_value',
-        index_id='index_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_index),
-            '__call__') as call:
-        client.get_index(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.GetIndexRequest(
-            project_id='project_id_value',
-            index_id='index_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_index_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index(
-            project_id='project_id_value',
-            index_id='index_id_value',
-            kind='kind_value',
-            ancestor=index.Index.AncestorMode.NONE,
-            state=index.Index.State.CREATING,
-        ))
-        response = await client.get_index()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.GetIndexRequest()
-
-@pytest.mark.asyncio
-async def test_get_index_async(transport: str = 'grpc_asyncio', request_type=datastore_admin.GetIndexRequest):
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_index),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(index.Index(
-            project_id='project_id_value',
-            index_id='index_id_value',
-            kind='kind_value',
-            ancestor=index.Index.AncestorMode.NONE,
-            state=index.Index.State.CREATING,
-        ))
-        response = await client.get_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datastore_admin.GetIndexRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, index.Index)
-    assert response.project_id == 'project_id_value'
-    assert response.index_id == 'index_id_value'
-    assert response.kind == 'kind_value'
-    assert response.ancestor == index.Index.AncestorMode.NONE
-    assert response.state == index.Index.State.CREATING
-
-
-@pytest.mark.asyncio
-async def test_get_index_async_from_dict():
-    await test_get_index_async(request_type=dict)
-
-
-def test_get_index_field_headers():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore_admin.GetIndexRequest()
-
-    request.project_id = 'project_id_value'
-    request.index_id = 'index_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_index),
-            '__call__') as call:
-        call.return_value = index.Index()
-        client.get_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&index_id=index_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_index_field_headers_async():
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore_admin.GetIndexRequest()
-
-    request.project_id = 'project_id_value'
-    request.index_id = 'index_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_index),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index())
-        await client.get_index(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&index_id=index_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore_admin.ListIndexesRequest,
-    dict,
-])
-def test_list_indexes(request_type, transport: str = 'grpc'):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datastore_admin.ListIndexesResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_indexes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datastore_admin.ListIndexesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListIndexesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_indexes_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        client.list_indexes()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.ListIndexesRequest()
-
-
-def test_list_indexes_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datastore_admin.ListIndexesRequest(
-        project_id='project_id_value',
-        filter='filter_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        client.list_indexes(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.ListIndexesRequest(
-            project_id='project_id_value',
-            filter='filter_value',
-            page_token='page_token_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_indexes_empty_call_async():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore_admin.ListIndexesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_indexes()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datastore_admin.ListIndexesRequest()
-
-@pytest.mark.asyncio
-async def test_list_indexes_async(transport: str = 'grpc_asyncio', request_type=datastore_admin.ListIndexesRequest):
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datastore_admin.ListIndexesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_indexes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datastore_admin.ListIndexesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListIndexesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_indexes_async_from_dict():
-    await test_list_indexes_async(request_type=dict)
-
-
-def test_list_indexes_field_headers():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore_admin.ListIndexesRequest()
-
-    request.project_id = 'project_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        call.return_value = datastore_admin.ListIndexesResponse()
-        client.list_indexes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_indexes_field_headers_async():
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datastore_admin.ListIndexesRequest()
-
-    request.project_id = 'project_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datastore_admin.ListIndexesResponse())
-        await client.list_indexes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value',
-    ) in kw['metadata']
-
-
-def test_list_indexes_pager(transport_name: str = "grpc"):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                    index.Index(),
-                    index.Index(),
-                ],
-                next_page_token='abc',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[],
-                next_page_token='def',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                ],
-                next_page_token='ghi',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                    index.Index(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('project_id', ''),
-            )),
-        )
-        pager = client.list_indexes(request={})
-
-        assert pager._metadata == metadata
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, index.Index)
-                   for i in results)
-def test_list_indexes_pages(transport_name: str = "grpc"):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                    index.Index(),
-                    index.Index(),
-                ],
-                next_page_token='abc',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[],
-                next_page_token='def',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                ],
-                next_page_token='ghi',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                    index.Index(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_indexes(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_indexes_async_pager():
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                    index.Index(),
-                    index.Index(),
-                ],
-                next_page_token='abc',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[],
-                next_page_token='def',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                ],
-                next_page_token='ghi',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                    index.Index(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_indexes(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, index.Index)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_indexes_async_pages():
-    client = DatastoreAdminAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_indexes),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                    index.Index(),
-                    index.Index(),
-                ],
-                next_page_token='abc',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[],
-                next_page_token='def',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                ],
-                next_page_token='ghi',
-            ),
-            datastore_admin.ListIndexesResponse(
-                indexes=[
-                    index.Index(),
-                    index.Index(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_indexes(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore_admin.ExportEntitiesRequest,
-    dict,
-])
-def test_export_entities_rest(request_type):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.export_entities(request)
-
-    # Establish that the response is the type that we expect.
- assert response.operation.name == "operations/spam" - - -def test_export_entities_rest_required_fields(request_type=datastore_admin.ExportEntitiesRequest): - transport_class = transports.DatastoreAdminRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["output_url_prefix"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_entities._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["outputUrlPrefix"] = 'output_url_prefix_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_entities._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "outputUrlPrefix" in jsonified_request - assert jsonified_request["outputUrlPrefix"] == 'output_url_prefix_value' - - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.export_entities(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_export_entities_rest_unset_required_fields():
-    transport = transports.DatastoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.export_entities._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("projectId", "outputUrlPrefix", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_export_entities_rest_interceptors(null_interceptor):
-    transport = transports.DatastoreAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatastoreAdminRestInterceptor(),
-        )
-    client = DatastoreAdminClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatastoreAdminRestInterceptor, "post_export_entities") as post, \
-        mock.patch.object(transports.DatastoreAdminRestInterceptor, "pre_export_entities") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = datastore_admin.ExportEntitiesRequest.pb(datastore_admin.ExportEntitiesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
-
-        request = datastore_admin.ExportEntitiesRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-
-        client.export_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_export_entities_rest_bad_request(transport: str = 'rest', request_type=datastore_admin.ExportEntitiesRequest):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.export_entities(request)
-
-
-def test_export_entities_rest_flattened():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'project_id': 'sample1'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            project_id='project_id_value',
-            labels={'key_value': 'value_value'},
-            entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']),
-            output_url_prefix='output_url_prefix_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.export_entities(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/projects/{project_id}:export" % client.transport._host, args[1])
-
-
-def test_export_entities_rest_flattened_error(transport: str = 'rest'):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.export_entities(
-            datastore_admin.ExportEntitiesRequest(),
-            project_id='project_id_value',
-            labels={'key_value': 'value_value'},
-            entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']),
-            output_url_prefix='output_url_prefix_value',
-        )
-
-
-def test_export_entities_rest_error():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore_admin.ImportEntitiesRequest,
-    dict,
-])
-def test_import_entities_rest(request_type):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.import_entities(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == "operations/spam"
-
-
-def test_import_entities_rest_required_fields(request_type=datastore_admin.ImportEntitiesRequest):
-    transport_class = transports.DatastoreAdminRestTransport
-
-    request_init = {}
-    request_init["project_id"] = ""
-    request_init["input_url"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_entities._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["projectId"] = 'project_id_value'
-    jsonified_request["inputUrl"] = 'input_url_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_entities._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "projectId" in jsonified_request
-    assert jsonified_request["projectId"] == 'project_id_value'
-    assert "inputUrl" in jsonified_request
-    assert jsonified_request["inputUrl"] == 'input_url_value'
-
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.import_entities(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_import_entities_rest_unset_required_fields():
-    transport = transports.DatastoreAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.import_entities._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("projectId", "inputUrl", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_import_entities_rest_interceptors(null_interceptor):
-    transport = transports.DatastoreAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatastoreAdminRestInterceptor(),
-        )
-    client = DatastoreAdminClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatastoreAdminRestInterceptor, "post_import_entities") as post, \
-        mock.patch.object(transports.DatastoreAdminRestInterceptor, "pre_import_entities") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = datastore_admin.ImportEntitiesRequest.pb(datastore_admin.ImportEntitiesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
-
-        request = datastore_admin.ImportEntitiesRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-
-        client.import_entities(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_import_entities_rest_bad_request(transport: str = 'rest', request_type=datastore_admin.ImportEntitiesRequest):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.import_entities(request)
-
-
-def test_import_entities_rest_flattened():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'project_id': 'sample1'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            project_id='project_id_value',
-            labels={'key_value': 'value_value'},
-            input_url='input_url_value',
-            entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.import_entities(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/projects/{project_id}:import" % client.transport._host, args[1])
-
-
-def test_import_entities_rest_flattened_error(transport: str = 'rest'):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.import_entities(
-            datastore_admin.ImportEntitiesRequest(),
-            project_id='project_id_value',
-            labels={'key_value': 'value_value'},
-            input_url='input_url_value',
-            entity_filter=datastore_admin.EntityFilter(kinds=['kinds_value']),
-        )
-
-
-def test_import_entities_rest_error():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore_admin.CreateIndexRequest,
-    dict,
-])
-def test_create_index_rest(request_type):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1'}
-    request_init["index"] = {'project_id': 'project_id_value', 'index_id': 'index_id_value', 'kind': 'kind_value', 'ancestor': 1, 'properties': [{'name': 'name_value', 'direction': 1}], 'state': 1}
-    # The version of a generated dependency at test runtime may differ from the version used during generation.
-    # Delete any fields which are not present in the current runtime dependency
-    # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
-    # Determine if the message type is proto-plus or protobuf
-    test_field = datastore_admin.CreateIndexRequest.meta.fields["index"]
-
-    def get_message_fields(field):
-        # Given a field which is a message (composite type), return a list with
-        # all the fields of the message.
-        # If the field is not a composite type, return an empty list.
-        message_fields = []
-
-        if hasattr(field, "message") and field.message:
-            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
-            if is_field_type_proto_plus_type:
-                message_fields = field.message.meta.fields.values()
-            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
-            else: # pragma: NO COVER
-                message_fields = field.message.DESCRIPTOR.fields
-        return message_fields
-
-    runtime_nested_fields = [
-        (field.name, nested_field.name)
-        for field in get_message_fields(test_field)
-        for nested_field in get_message_fields(field)
-    ]
-
-    subfields_not_in_runtime = []
-
-    # For each item in the sample request, create a list of sub fields which are not present at runtime
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for field, value in request_init["index"].items(): # pragma: NO COVER
-        result = None
-        is_repeated = False
-        # For repeated fields
-        if isinstance(value, list) and len(value):
-            is_repeated = True
-            result = value[0]
-        # For fields where the type is another message
-        if isinstance(value, dict):
-            result = value
-
-        if result and hasattr(result, "keys"):
-            for subfield in result.keys():
-                if (field, subfield) not in runtime_nested_fields:
-                    subfields_not_in_runtime.append(
-                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
-                    )
-
-    # Remove fields from the sample request which are not present in the runtime version of the dependency
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
-        field = subfield_to_delete.get("field")
-        field_repeated = subfield_to_delete.get("is_repeated")
-        subfield = subfield_to_delete.get("subfield")
-        if subfield:
-            if field_repeated:
-                for i in range(0, len(request_init["index"][field])):
-                    del request_init["index"][field][i][subfield]
-            else:
-                del request_init["index"][field][subfield]
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.create_index(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == "operations/spam"
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_index_rest_interceptors(null_interceptor):
-    transport = transports.DatastoreAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatastoreAdminRestInterceptor(),
-        )
-    client = DatastoreAdminClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatastoreAdminRestInterceptor, "post_create_index") as post, \
-        mock.patch.object(transports.DatastoreAdminRestInterceptor, "pre_create_index") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = datastore_admin.CreateIndexRequest.pb(datastore_admin.CreateIndexRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
-
-        request = datastore_admin.CreateIndexRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-
-        client.create_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_create_index_rest_bad_request(transport: str = 'rest', request_type=datastore_admin.CreateIndexRequest):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.create_index(request)
-
-
-def test_create_index_rest_error():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore_admin.DeleteIndexRequest,
-    dict,
-])
-def test_delete_index_rest(request_type):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'index_id': 'sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.delete_index(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == "operations/spam"
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_index_rest_interceptors(null_interceptor):
-    transport = transports.DatastoreAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatastoreAdminRestInterceptor(),
-        )
-    client = DatastoreAdminClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatastoreAdminRestInterceptor, "post_delete_index") as post, \
-        mock.patch.object(transports.DatastoreAdminRestInterceptor, "pre_delete_index") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = datastore_admin.DeleteIndexRequest.pb(datastore_admin.DeleteIndexRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
-
-        request = datastore_admin.DeleteIndexRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-
-        client.delete_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_delete_index_rest_bad_request(transport: str = 'rest', request_type=datastore_admin.DeleteIndexRequest):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'index_id': 'sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.delete_index(request)
-
-
-def test_delete_index_rest_error():
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    datastore_admin.GetIndexRequest,
-    dict,
-])
-def test_get_index_rest(request_type):
-    client = DatastoreAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'index_id': 'sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = index.Index( - project_id='project_id_value', - index_id='index_id_value', - kind='kind_value', - ancestor=index.Index.AncestorMode.NONE, - state=index.Index.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = index.Index.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_index(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.project_id == 'project_id_value' - assert response.index_id == 'index_id_value' - assert response.kind == 'kind_value' - assert response.ancestor == index.Index.AncestorMode.NONE - assert response.state == index.Index.State.CREATING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_index_rest_interceptors(null_interceptor): - transport = transports.DatastoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreAdminRestInterceptor(), - ) - client = DatastoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreAdminRestInterceptor, "post_get_index") as post, \ - mock.patch.object(transports.DatastoreAdminRestInterceptor, "pre_get_index") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore_admin.GetIndexRequest.pb(datastore_admin.GetIndexRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = index.Index.to_json(index.Index()) - - request = datastore_admin.GetIndexRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = index.Index() - - client.get_index(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_index_rest_bad_request(transport: str = 'rest', request_type=datastore_admin.GetIndexRequest): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'index_id': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_index(request) - - -def test_get_index_rest_error(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - datastore_admin.ListIndexesRequest, - dict, -]) -def test_list_indexes_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datastore_admin.ListIndexesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datastore_admin.ListIndexesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_indexes(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_indexes_rest_interceptors(null_interceptor): - transport = transports.DatastoreAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatastoreAdminRestInterceptor(), - ) - client = DatastoreAdminClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatastoreAdminRestInterceptor, "post_list_indexes") as post, \ - mock.patch.object(transports.DatastoreAdminRestInterceptor, "pre_list_indexes") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = datastore_admin.ListIndexesRequest.pb(datastore_admin.ListIndexesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = datastore_admin.ListIndexesResponse.to_json(datastore_admin.ListIndexesResponse()) - - request = datastore_admin.ListIndexesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datastore_admin.ListIndexesResponse() - - client.list_indexes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_indexes_rest_bad_request(transport: str = 'rest', request_type=datastore_admin.ListIndexesRequest): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - 
# send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_indexes(request) - - -def test_list_indexes_rest_pager(transport: str = 'rest'): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - datastore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - index.Index(), - ], - next_page_token='abc', - ), - datastore_admin.ListIndexesResponse( - indexes=[], - next_page_token='def', - ), - datastore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - ], - next_page_token='ghi', - ), - datastore_admin.ListIndexesResponse( - indexes=[ - index.Index(), - index.Index(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(datastore_admin.ListIndexesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1'} - - pager = client.list_indexes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, index.Index) - for i in results) - - pages = list(client.list_indexes(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DatastoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatastoreAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DatastoreAdminClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastoreAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DatastoreAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, - transports.DatastoreAdminRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = DatastoreAdminClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DatastoreAdminGrpcTransport, - ) - -def test_datastore_admin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DatastoreAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_datastore_admin_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DatastoreAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'export_entities', - 'import_entities', - 'create_index', - 'delete_index', - 'get_index', - 'list_indexes', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_datastore_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatastoreAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - quota_project_id="octopus", - ) - - -def test_datastore_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datastore_admin_v1.services.datastore_admin.transports.DatastoreAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatastoreAdminTransport() - adc.assert_called_once() - - -def test_datastore_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DatastoreAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, - ], -) -def test_datastore_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/datastore',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastoreAdminGrpcTransport, - transports.DatastoreAdminGrpcAsyncIOTransport, - transports.DatastoreAdminRestTransport, - ], -) -def test_datastore_admin_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DatastoreAdminGrpcTransport, grpc_helpers), - (transports.DatastoreAdminGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_datastore_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datastore.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', -), - scopes=["1", "2"], - default_host="datastore.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DatastoreAdminGrpcTransport, transports.DatastoreAdminGrpcAsyncIOTransport]) -def test_datastore_admin_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_datastore_admin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DatastoreAdminRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_datastore_admin_rest_lro_client(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_datastore_admin_host_no_port(transport_name): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datastore.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datastore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://datastore.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_datastore_admin_host_with_port(transport_name): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datastore.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datastore.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://datastore.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_datastore_admin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DatastoreAdminClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DatastoreAdminClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.export_entities._session - session2 = client2.transport.export_entities._session - assert session1 != session2 - session1 = client1.transport.import_entities._session - session2 = client2.transport.import_entities._session - assert session1 != session2 - session1 = client1.transport.create_index._session - session2 = client2.transport.create_index._session - assert session1 != session2 - session1 = client1.transport.delete_index._session - session2 = client2.transport.delete_index._session - assert session1 != session2 - session1 = client1.transport.get_index._session - session2 = client2.transport.get_index._session - assert session1 != session2 - session1 = 
client1.transport.list_indexes._session - session2 = client2.transport.list_indexes._session - assert session1 != session2 -def test_datastore_admin_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DatastoreAdminGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_datastore_admin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DatastoreAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DatastoreAdminGrpcTransport, transports.DatastoreAdminGrpcAsyncIOTransport]) -def test_datastore_admin_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DatastoreAdminGrpcTransport, transports.DatastoreAdminGrpcAsyncIOTransport]) -def test_datastore_admin_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_datastore_admin_grpc_lro_client(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_datastore_admin_grpc_lro_async_client(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DatastoreAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DatastoreAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = DatastoreAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DatastoreAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatastoreAdminClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DatastoreAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DatastoreAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = DatastoreAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DatastoreAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DatastoreAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DatastoreAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DatastoreAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DatastoreAdminTransport, '_prep_wrapped_messages') as prep: - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DatastoreAdminTransport, '_prep_wrapped_messages') as prep: - transport_class = DatastoreAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/operations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/operations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/operations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/operations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/operations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/operations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport), - (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - )
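For readers skimming the removed tests above, the one pattern nearly all of the REST cases share is worth seeing in isolation: patch the transport's requests.Session, hand back a canned Response, and assert on the parsed result. Below is a minimal sketch of that pattern under the same classes the deleted file imports (DatastoreAdminClient, anonymous credentials, operations_pb2); the helper name is illustrative, not part of the original file.

# Minimal sketch of the mocked-session REST test pattern used by the
# deleted tests above (e.g. test_get_operation_rest). The function name
# is invented for illustration; the classes and calls mirror the originals.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.longrunning import operations_pb2
from google.protobuf import json_format
from requests import Response

from google.cloud.datastore_admin_v1.services.datastore_admin import (
    DatastoreAdminClient,
)


def sketch_get_operation_rest():
    client = DatastoreAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    # Patch the session so no real HTTP request is made, and return a
    # 200 response whose body is a JSON-serialized Operation message.
    with mock.patch.object(type(client.transport._session), "request") as req:
        response_value = Response()
        response_value.status_code = 200
        response_value._content = json_format.MessageToJson(
            operations_pb2.Operation(name="operations/spam")
        ).encode("UTF-8")
        req.return_value = response_value

        response = client.get_operation(
            request={"name": "projects/sample1/operations/sample2"}
        )

    # The transport transcodes the request, issues exactly one session
    # call, and parses the JSON body back into an Operation proto.
    assert isinstance(response, operations_pb2.Operation)
    assert response.name == "operations/spam"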
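The *_rest_interceptors tests above also exercise the one piece of public surface that is easy to miss in the mock noise: DatastoreAdminRestInterceptor exposes paired pre_*/post_* hooks per RPC, where a pre-hook must return a (request, metadata) tuple and a post-hook must return the response. A minimal sketch of wiring a custom interceptor into the REST transport, assuming the same generated classes; the LoggingInterceptor name is invented for illustration.

# Sketch of the pre/post hook contract exercised by the deleted
# *_rest_interceptors tests above.
from google.auth import credentials as ga_credentials

from google.cloud.datastore_admin_v1.services.datastore_admin import (
    DatastoreAdminClient,
    transports,
)


class LoggingInterceptor(transports.DatastoreAdminRestInterceptor):
    # Runs before the HTTP call; must return the (request, metadata) pair,
    # possibly modified.
    def pre_list_indexes(self, request, metadata):
        print(f"listing indexes for project {request.project_id!r}")
        return request, metadata

    # Runs after the response is parsed; must return the response.
    def post_list_indexes(self, response):
        print(f"received {len(response.indexes)} index(es)")
        return response


transport = transports.DatastoreAdminRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = DatastoreAdminClient(transport=transport)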
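Finally, the common_*_path round-trip tests above boil down to a pair of reversible classmethods on the client: a formatter that builds a resource path and a parser that recovers the fields as a dict. A short usage sketch, with made-up sample values:

# The path helpers tested above are simple, reversible formatters.
path = DatastoreAdminClient.common_location_path("my-project", "us-east1")
assert path == "projects/my-project/locations/us-east1"
assert DatastoreAdminClient.parse_common_location_path(path) == {
    "project": "my-project",
    "location": "us-east1",
}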